Merge branch 'dev' of github.com:davidhalter/jedi into dev

David Halter
2013-07-17 14:01:13 +02:00
21 changed files with 203 additions and 208 deletions

View File

@@ -42,8 +42,8 @@ import sys
# imports and circular imports... Just avoid it:
sys.path.insert(0, __path__[0])
- from .api import Script, Interpreter, NotFoundError, set_debug_function, \
-     preload_module, defined_names
+ from .api import Script, Interpreter, NotFoundError, set_debug_function
+ from .api import preload_module, defined_names
from . import settings
sys.path.pop(0)
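The hunk above splits one backslash-continued import into two statements that bind the same names. A standalone sketch of the same refactor, using stdlib names purely for illustration (not from this commit):

# Before: one statement with a line continuation.
from os.path import join, split, \
    basename, dirname

# After: two statements, no backslash, same four names bound.
from os.path import join, split
from os.path import basename, dirname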

View File

@@ -18,7 +18,7 @@ is_py33 = sys.hexversion >= 0x03030000
def find_module_py33(string, path=None):
    loader = importlib.machinery.PathFinder.find_module(string, path)
    if loader is None and path is None:  # Fallback to find builtins
        loader = importlib.find_loader(string)
    if loader is None:
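`PathFinder.find_module` only searches path entries, so it returns `None` for built-in modules; the `importlib.find_loader` call above is the fallback that covers them. A quick standalone check of that behaviour against the Python 3.3-era API used here (both calls are deprecated in later versions and removed in 3.12):

import importlib
import importlib.machinery

# A built-in module such as `sys` has no file on sys.path, so PathFinder misses it...
print(importlib.machinery.PathFinder.find_module('sys'))  # None
# ...while importlib.find_loader resolves it through the built-in importer.
print(importlib.find_loader('sys'))  # e.g. <class '_frozen_importlib.BuiltinImporter'>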

View File

@@ -33,7 +33,6 @@ import builtin
class NotFoundError(Exception):
    """A custom error to avoid catching the wrong exceptions."""
-    pass


class Script(object):
@@ -68,7 +67,7 @@ class Script(object):
            source_path, source=self.source, position=self.pos)
        self._source_path = source_path
        self.source_path = None if source_path is None \
            else os.path.abspath(source_path)
        debug.speed('init')

    def __repr__(self):
@@ -100,7 +99,7 @@ class Script(object):
        except NotFoundError:
            scopes = []
            scope_generator = evaluate.get_names_of_scope(
                self._parser.user_scope, self.pos)
            completions = []
            for scope, name_list in scope_generator:
                for c in name_list:
@@ -137,14 +136,14 @@ class Script(object):
            bs = builtin.Builtin.scope
            if isinstance(u, pr.Import):
                if (u.relative_count > 0 or u.from_ns) and not re.search(
                        r'(,|from)\s*$|import\s+$', completion_line):
                    completions += ((k, bs) for k
                                    in keywords.get_keywords('import'))

        if not path and not isinstance(u, pr.Import):
            # add keywords
            completions += ((k, bs) for k in keywords.get_keywords(
                all=True))

        needs_dot = not dot and path
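In the hunk above, the `import` keyword is only offered as a completion when the current line does not already end in a comma, a trailing `from`, or `import` plus whitespace; that is exactly what the regex tests. A standalone check of the same pattern (the sample lines are made up):

import re

pattern = re.compile(r'(,|from)\s*$|import\s+$')
samples = ['import ', 'from ', 'from os import path, ', 'from os ']
for line in samples:
    # The first three match (no keyword is suggested there); 'from os ' does not,
    # so the `import` keyword would still be offered at that point.
    print(repr(line), bool(pattern.search(line)))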
@@ -156,9 +155,9 @@ class Script(object):
                    and n.lower().startswith(like.lower()) \
                    or n.startswith(like):
                if not evaluate.filter_private_variable(s,
                        self._parser.user_stmt, n):
                    new = api_classes.Completion(c, needs_dot,
                                                 len(like), s)
                    k = (new.name, new.complete)  # key
                    if k in comp_dct and settings.no_completion_duplicates:
                        comp_dct[k]._same_name_completions.append(new)
@@ -329,7 +328,7 @@ class Script(object):
        scopes |= keywords.get_keywords(string=goto_path, pos=self.pos)
        d = set([api_classes.Definition(s) for s in scopes
                 if not isinstance(s, imports.ImportPath._GlobalNamespace)])
        return self._sorted_defs(d)

    @api_classes._clear_caches_after_call
@@ -343,7 +342,7 @@ class Script(object):
        :rtype: list of :class:`api_classes.Definition`
        """
        d = [api_classes.Definition(d) for d in set(self._goto()[0])
             if not isinstance(d, imports.ImportPath._GlobalNamespace)]
        return self._sorted_defs(d)

    def _goto(self, add_import_name=False):
@@ -360,7 +359,7 @@ class Script(object):
            definitions = set(defs)
            for d in defs:
                if isinstance(d.parent, pr.Import) \
                        and d.start_pos == (0, 0):
                    i = imports.ImportPath(d.parent).follow(is_goto=True)
                    definitions.remove(d)
                    definitions |= follow_inexistent_imports(i)
@@ -414,14 +413,14 @@ class Script(object):
        user_stmt = self._parser.user_stmt
        definitions, search_name = self._goto(add_import_name=True)
        if isinstance(user_stmt, pr.Statement) \
                and self.pos < user_stmt.get_commands()[0].start_pos:
            # the search_name might be before `=`
            definitions = [v for v in user_stmt.set_vars
                           if unicode(v.names[-1]) == search_name]
        if not isinstance(user_stmt, pr.Import):
            # import case is looked at with add_import_name option
            definitions = dynamic.usages_add_import_modules(definitions,
                                                            search_name)

        module = set([d.get_parent_until() for d in definitions])
        module.add(self._parser.module)
@@ -472,7 +471,7 @@ class Script(object):
        user_stmt = self._parser.user_stmt
        if user_stmt is not None and isinstance(user_stmt, pr.Statement):
            call, index, _ = helpers.search_function_definition(
                user_stmt, self.pos)
        debug.speed('func_call parsed')
        return call, index
@@ -493,7 +492,7 @@ class Script(object):
            kill_count += 1
        i = imports.ImportPath(user_stmt, is_like_search,
                               kill_count=kill_count, direct_resolve=True)
        return i, cur_name_part

    def _get_completion_parts(self, path):
@@ -582,7 +581,7 @@ def preload_module(*modules):
def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,
                       notices=True, speed=True):
    """
    Define a callback debug function to get all the debug messages.

View File

@@ -344,7 +344,7 @@ class Completion(BaseDefinition):
        dot = '.' if self._needs_dot else ''
        append = ''
        if settings.add_bracket_after_function \
                and self.type == 'Function':
            append = '('

        if settings.add_dot_after_module:
@@ -376,7 +376,6 @@ class Completion(BaseDefinition):
        warnings.warn("Use name instead.", DeprecationWarning)
        return self.name

    @property
    def description(self):
        """
@@ -414,7 +413,7 @@ class Completion(BaseDefinition):
            return [self]
        self._followed_definitions = \
            [BaseDefinition(d, d.start_pos) for d in defs]
        _clear_caches()

        return self._followed_definitions
@@ -504,7 +503,7 @@ class Definition(BaseDefinition):
        `module.class.function` path.
        """
        if self.module_path.endswith('.py') \
                and not isinstance(self._definition, pr.Module):
            position = '@%s' % (self.line)
        else:
            # is a builtin or module
@@ -601,4 +600,4 @@ class CallDef(object):
    def __repr__(self):
        return '<%s: %s index %s>' % (type(self).__name__, self._executable,
                                      self.index)

View File

@@ -233,7 +233,7 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
            if is_in_base_classes(scope, n, exe):
                continue
            if inspect.isbuiltin(exe) or inspect.ismethod(exe) \
                    or inspect.ismethoddescriptor(exe):
                funcs[n] = exe
            elif inspect.isclass(exe) or inspect.ismodule(exe):
                classes[n] = exe
@@ -254,15 +254,15 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
    code += get_doc(scope)
    names = set(dir(scope)) - set(['__file__', '__name__', '__doc__',
                                   '__path__', '__package__']) \
        | set(['mro'])
    classes, funcs, stmts, members = get_scope_objects(names)

    # classes
    for name, cl in classes.items():
        bases = (c.__name__ for c in cl.__bases__) if inspect.isclass(cl) \
            else []
        code += 'class %s(%s):\n' % (name, ','.join(bases))
        if depth == 0:
            try:
@@ -321,7 +321,7 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
            file_type = io.TextIOWrapper
        else:
            file_type = types.FileType
-        if type(value) == file_type:
+        if isinstance(value, file_type):
            value = 'open()'
        elif name == 'None':
            value = ''
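The hunk above replaces the exact-type comparison `type(value) == file_type` with `isinstance(value, file_type)`, which also accepts subclasses of the file type. A minimal standalone illustration (generic classes, not from this codebase):

class BaseFile:
    pass

class BufferedFile(BaseFile):
    pass

value = BufferedFile()
print(type(value) == BaseFile)      # False: exact type comparison ignores inheritance
print(isinstance(value, BaseFile))  # True: isinstance also accepts subclasses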
@@ -336,13 +336,6 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
                value = '%s.%s' % (mod, value)
            code += '%s = %s\n' % (name, value)

-    if depth == 0:
-        #with open('writeout.py', 'w') as f:
-        #    f.write(code)
-        #import sys
-        #sys.stdout.write(code)
-        #exit()
-        pass
    return code
@@ -378,7 +371,7 @@ def _parse_function_doc(func):
                return ','.join(args)

            while True:
                param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
                                             change_options, param_str)
                if changes == 0:
                    break
        except (ValueError, AttributeError):

View File

@@ -147,8 +147,8 @@ class NoErrorTokenizer(object):
                and self.previous[0] in (tokenize.INDENT, tokenize.NL, None,
                                         tokenize.NEWLINE, tokenize.DEDENT) \
                and c[0] not in (tokenize.COMMENT, tokenize.INDENT,
                                 tokenize.NL, tokenize.NEWLINE, tokenize.DEDENT):
-            #print c, tokenize.tok_name[c[0]]
+            # print c, tokenize.tok_name[c[0]]
            tok = c[1]
            indent = c[2][1]

View File

@@ -62,4 +62,4 @@ def print_to_stdout(level, str_out):
    print(col + str_out + Fore.RESET)

-#debug_function = print_to_stdout
+# debug_function = print_to_stdout
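`print_to_stdout` colours its output and then resets the terminal colour, as the `Fore.RESET` above shows; the `Fore` constants come from the colorama package. A minimal standalone sketch of the same pattern (`Fore.GREEN` stands in for the level-dependent colour `col` computed elsewhere in the function):

from colorama import Fore, init

init()  # makes ANSI colour codes work on Windows; harmless elsewhere
print(Fore.GREEN + 'dbg: example message' + Fore.RESET)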

View File

@@ -27,8 +27,8 @@ DOCSTRING_PARAM_PATTERNS = [
]

DOCSTRING_RETURN_PATTERNS = [
    re.compile(r'\s*:rtype:\s*([^\n]+)', re.M),  # Sphinx
    re.compile(r'\s*@rtype:\s*([^\n]+)', re.M),  # Epydoc
]

REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
@@ -37,7 +37,7 @@ REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
@cache.memoize_default()
def follow_param(param):
    func = param.parent_function
-    #print func, param, param.parent_function
+    # print func, param, param.parent_function
    param_str = _search_param_in_docstr(func.docstr, str(param.get_name()))
    user_position = (1, 0)
@@ -52,7 +52,7 @@ def follow_param(param):
            user_position = (2, 0)
        p = parsing.Parser(param_str, None, user_position,
                           no_docstr=True)
        return evaluate.follow_statement(p.user_stmt)
    return []
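`follow_param` looks up a parameter's type string in the docstring and parses it; the `DOCSTRING_RETURN_PATTERNS` above do the same job for return types in Sphinx and Epydoc markup. A standalone check of the Sphinx pattern on a made-up docstring:

import re

sphinx_rtype_pattern = re.compile(r'\s*:rtype:\s*([^\n]+)', re.M)

docstring = """Return the user's home directory.

:rtype: str
"""
match = sphinx_rtype_pattern.search(docstring)
print(match.group(1))  # prints 'str'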

View File

@@ -334,7 +334,7 @@ def _check_array_additions(compare_array, module, is_list):
    settings.dynamic_params_for_other_modules = False
    search_names = ['append', 'extend', 'insert'] if is_list else \
        ['add', 'update']
    comp_arr_parent = get_execution_parent(compare_array, er.Execution)
    possible_stmts = []
@@ -351,7 +351,7 @@ def _check_array_additions(compare_array, module, is_list):
            # literally copy the contents of a function.
            if isinstance(comp_arr_parent, er.Execution):
                stmt = comp_arr_parent. \
                    get_statement_for_position(stmt.start_pos)
                if stmt is None:
                    continue
            # InstanceElements are special, because they don't get copied,
@@ -403,7 +403,9 @@ class ArrayInstance(pr.Base):
                if self.var_args.start_pos != array.var_args.start_pos:
                    items += array.iter_content()
                else:
-                    debug.warning('ArrayInstance recursion', self.var_args)
+                    debug.warning(
+                        'ArrayInstance recursion',
+                        self.var_args)
                    continue
            items += evaluate.get_iterator_types([typ])
@@ -472,7 +474,7 @@ def usages(definitions, search_name, mods):
                for used_count, name_part in imps:
                    i = imports.ImportPath(stmt, kill_count=count - used_count,
                                           direct_resolve=True)
                    f = i.follow(is_goto=True)
                    if set(f) & set(definitions):
                        names.append(api_classes.Usage(name_part, stmt))
@@ -524,7 +526,7 @@ def check_statement_information(stmt, search_name):
        # this might be removed if we analyze and, etc
        assert len(commands) == 1
        call = commands[0]
-        assert type(call) == pr.Call and str(call.name) == 'isinstance'
+        assert type(call) is pr.Call and str(call.name) == 'isinstance'
        assert bool(call.execution)

        # isinstance check

View File

@@ -105,8 +105,8 @@ def get_defined_names_for_position(scope, position=None, start_scope=None):
    # Instances have special rules, always return all the possible completions,
    # because class variables are always valid and the `self.` variables, too.
    if (not position or isinstance(scope, (er.Array, er.Instance))
            or start_scope != scope
            and isinstance(start_scope, (pr.Function, er.Execution))):
        return names
    names_new = []
    for n in names:
@@ -116,7 +116,7 @@ def get_defined_names_for_position(scope, position=None, start_scope=None):
def get_names_of_scope(scope, position=None, star_search=True,
                       include_builtin=True):
    """
    Get all completions (names) possible for the current scope.
    The star search option is only here to provide an optimization. Otherwise
@@ -168,17 +168,16 @@ def get_names_of_scope(scope, position=None, star_search=True,
        # Ignore the Flows, because the classes and functions care for that.
        # InstanceElement of Class is ignored, if it is not the start scope.
        if not (scope != non_flow and scope.isinstance(pr.Class)
                or scope.isinstance(pr.Flow)
                or scope.isinstance(er.Instance)
-               and non_flow.isinstance(er.Function)
-               ):
+               and non_flow.isinstance(er.Function)):
            try:
                if isinstance(scope, er.Instance):
                    for g in scope.scope_generator():
                        yield g
                else:
                    yield scope, get_defined_names_for_position(scope,
                                                                position, in_func_scope)
            except StopIteration:
                reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
        if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
@@ -204,7 +203,7 @@ def get_names_of_scope(scope, position=None, star_search=True,
def find_name(scope, name_str, position=None, search_global=False,
              is_goto=False):
    """
    This is the search function. The most important part to debug.
    `remove_statements` and `filter_statements` really are the core part of
@@ -273,8 +272,8 @@ def find_name(scope, name_str, position=None, search_global=False,
                if check_instance is not None:
                    # class renames
                    add = [er.InstanceElement(check_instance, a, True)
                           if isinstance(a, (er.Function, pr.Function))
                           else a for a in add]
                res_new += add
            else:
                if isinstance(r, pr.Class):
@@ -349,7 +348,7 @@ def find_name(scope, name_str, position=None, search_global=False,
                # Compare start_pos, because names may be different
                # because of executions.
                if c.name.start_pos == name.start_pos \
                        and c.execution:
                    return True
        return False
@@ -368,7 +367,7 @@ def find_name(scope, name_str, position=None, search_global=False,
                # TODO this makes self variables non-breakable. wanted?
                if isinstance(name, er.InstanceElement) \
                        and not name.is_class_var:
                    no_break_scope = True
                result.append(par)
@@ -387,7 +386,7 @@ def find_name(scope, name_str, position=None, search_global=False,
            for name in sorted(name_list, key=comparison_func, reverse=True):
                p = name.parent.parent if name.parent else None
                if isinstance(p, er.InstanceElement) \
                        and isinstance(p.var, pr.Class):
                    p = p.var
                if name_str == name.get_code() and p not in break_scopes:
                    r, no_break_scope = process(name)
@@ -410,7 +409,7 @@ def find_name(scope, name_str, position=None, search_global=False,
            while flow_scope:
                # TODO check if result is in scope -> no evaluation necessary
                n = dynamic.check_flow_information(flow_scope, name_str,
                                                   position)
                if n:
                    result = n
                    break
@@ -428,7 +427,7 @@ def find_name(scope, name_str, position=None, search_global=False,
                # __getattr__ / __getattribute__
                result += check_getattr(nscope, name_str)
        debug.dbg('sfn filter "%s" in (%s-%s): %s@%s' % (name_str, scope,
                                                         nscope, result, position))
        return result

    def descriptor_check(result):
@@ -436,7 +435,7 @@ def find_name(scope, name_str, position=None, search_global=False,
        res_new = []
        for r in result:
            if isinstance(scope, (er.Instance, er.Class)) \
                    and hasattr(r, 'get_descriptor_return'):
                # handle descriptors
                with common.ignored(KeyError):
                    res_new += r.get_descriptor_return(scope)
@@ -537,7 +536,7 @@ def assign_tuples(tup, results, seek_name):
                func = r.get_exact_index_types
            except AttributeError:
                debug.warning("invalid tuple lookup %s of result %s in %s"
                              % (tup, results, seek_name))
            else:
                with common.ignored(IndexError):
                    types += func(index)
@@ -643,7 +642,7 @@ def follow_call_list(call_list, follow_array=False):
                    call_path = call.generate_call_path()
                    next(call_path, None)  # the first one has been used already
                    result += follow_paths(call_path, r, call.parent,
                                           position=call.start_pos)
                elif isinstance(call, pr.ListComprehension):
                    loop = evaluate_list_comprehension(call)
                    # Caveat: parents are being changed, but this doesn't matter,
@@ -655,7 +654,7 @@ def follow_call_list(call_list, follow_array=False):
                    result.append(er.Function(call))
                # With things like params, these can also be functions...
                elif isinstance(call, (er.Function, er.Class, er.Instance,
                                       dynamic.ArrayInstance)):
                    result.append(call)
                # The string tokens are just operations (+, -, etc.)
                elif not isinstance(call, (str, unicode)):
@@ -673,8 +672,8 @@ def follow_call_list(call_list, follow_array=False):
                    result += follow_call(call)
                elif call == '*':
                    if [r for r in result if isinstance(r, er.Array)
                            or isinstance(r, er.Instance)
                            and str(r.name) == 'str']:
                        # if it is an iterable, ignore * operations
                        next(calls_iterator)
    return set(result)
@@ -701,7 +700,7 @@ def follow_call_path(path, scope, position):
    if isinstance(current, pr.NamePart):
        # This is the first global lookup.
        scopes = find_name(scope, current, position=position,
                           search_global=True)
    else:
        if current.type in (pr.Call.STRING, pr.Call.NUMBER):
            t = type(current.name).__name__
@@ -775,7 +774,7 @@ def follow_path(path, scope, call_scope, position=None):
        if filter_private_variable(scope, call_scope, current):
            return []
        result = imports.strip_imports(find_name(scope, current,
                                                 position=position))
    return follow_paths(path, set(result), call_scope, position=position)
@@ -814,5 +813,5 @@ def goto(stmt, call_path=None):
        follow_res = []
        for s in scopes:
            follow_res += find_name(s, search, pos,
                                    search_global=search_global, is_goto=True)
    return follow_res, search

View File

@@ -53,11 +53,13 @@ class Executable(pr.IsScope):
class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
    """ This class is used to evaluate instances. """

    def __init__(self, base, var_args=()):
        super(Instance, self).__init__(base, var_args)
        if str(base.name) in ['list', 'set'] \
                and builtin.Builtin.scope == base.get_parent_until():
            # compare the module path with the builtin name.
            self.var_args = dynamic.check_array_instances(self)
        else:
@@ -166,14 +168,14 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
    def __getattr__(self, name):
        if name not in ['start_pos', 'end_pos', 'name', 'get_imports',
                        'doc', 'docstr', 'asserts']:
            raise AttributeError("Instance %s: Don't touch this (%s)!"
                                 % (self, name))
        return getattr(self.base, name)

    def __repr__(self):
        return "<e%s of %s (var_args: %s)>" % \
            (type(self).__name__, self.base, len(self.var_args or []))


class InstanceElement(use_metaclass(cache.CachedMetaClass)):
@@ -195,8 +197,8 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass)):
    def parent(self):
        par = self.var.parent
        if isinstance(par, Class) and par == self.instance.base \
                or isinstance(par, pr.Class) \
                and par == self.instance.base.base:
            par = self.instance
        elif not isinstance(par, pr.Module):
            par = InstanceElement(self.instance, par, self.is_class_var)
@@ -285,8 +287,8 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
    def __getattr__(self, name):
        if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
                        'doc', 'get_imports', 'get_parent_until', 'get_code',
                        'subscopes']:
            raise AttributeError("Don't touch this: %s of %s !" % (name, self))
        return getattr(self.base, name)
@@ -319,11 +321,11 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
                dec_results = evaluate.follow_statement(dec)
                if not len(dec_results):
                    debug.warning('decorator func not found: %s in stmt %s' %
                                  (self.base_func, dec))
                    return None
                if len(dec_results) > 1:
                    debug.warning('multiple decorators found', self.base_func,
                                  dec_results)
                decorator = dec_results.pop()
                # Create param array.
                old_func = Function(f, is_decorated=True)
@@ -334,7 +336,7 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
                    return None
                if len(wrappers) > 1:
                    debug.warning('multiple wrappers found', self.base_func,
                                  wrappers)
                # This is here, that the wrapper gets executed.
                f = wrappers[0]
@@ -415,7 +417,7 @@ class Execution(Executable):
                    debug.warning('jedi getattr is too simple')
                key = arr_name.var_args[0]
                stmts += evaluate.follow_path(iter([key]), obj,
                                              self.base)
            return stmts
        elif func_name == 'type':
            # otherwise it would be a metaclass
@@ -428,7 +430,7 @@ class Execution(Executable):
            func = self.var_args.get_parent_until(accept)
            if func.isinstance(*accept):
                cls = func.get_parent_until(accept + (pr.Class,),
                                            include_current=False)
                if isinstance(cls, pr.Class):
                    cls = Class(cls)
                    su = cls.get_super_classes()
@@ -450,7 +452,7 @@ class Execution(Executable):
            if hasattr(self.base, 'execute_subscope_by_name'):
                try:
                    stmts = self.base.execute_subscope_by_name('__call__',
                                                               self.var_args)
                except KeyError:
                    debug.warning("no __call__ func available", self.base)
            else:
@@ -554,7 +556,7 @@ class Execution(Executable):
                else:
                    keys_used.add(str(key))
                    result.append(gen_param_name_copy(key_param,
                                                      values=[value]))
                key, value = next(var_arg_iterator, (None, None))

            commands = param.get_commands()
@@ -599,7 +601,7 @@ class Execution(Executable):
            if not ignore_creation and (not keys_only or commands[0] == '**'):
                keys_used.add(str(key))
                result.append(gen_param_name_copy(param, keys=keys,
                                                  values=values, array_type=array_type))

        if keys_only:
            # sometimes param arguments are not completely written (which would
@@ -641,7 +643,7 @@ class Execution(Executable):
                    call = key_stmt.get_commands()[0]
                    if isinstance(call, pr.Name):
                        yield call, value_stmt
-                    elif type(call) == pr.Call:
+                    elif type(call) is pr.Call:
                        yield call.name, value_stmt
            # Normal arguments (including key arguments).
            else:
@@ -732,7 +734,7 @@ class Execution(Executable):
    def __repr__(self):
        return "<%s of %s>" % \
            (type(self).__name__, self.base)


class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
@@ -752,7 +754,7 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
        executes_generator = ('__next__', 'send')
        for n in ('close', 'throw') + executes_generator:
            name = pr.Name(builtin.Builtin.scope, [(n, none_pos)],
                           none_pos, none_pos)
            if n in executes_generator:
                name.parent = self
            names.append(name)
@@ -769,10 +771,10 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
    def __getattr__(self, name):
        if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
                        'asserts', 'doc', 'docstr', 'get_parent_until', 'get_code',
                        'subscopes']:
            raise AttributeError("Accessing %s of %s is not allowed."
                                 % (self, name))
        return getattr(self.func, name)

    def __repr__(self):
@@ -800,8 +802,8 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
            # otherwise it just ignores the index (e.g. [1+1]).
            index = index_possibilities[0]
            if isinstance(index, Instance) \
                    and str(index.name) in ['int', 'str'] \
                    and len(index.var_args) == 1:
                with common.ignored(KeyError, IndexError):
                    return self.get_exact_index_types(index.var_args[0])

View File

@@ -55,7 +55,7 @@ class Module(pr.Simple, pr.Module):
    def __repr__(self):
        return "<%s: %s@%s-%s>" % (type(self).__name__, self.name,
                                   self.start_pos[0], self.end_pos[0])


class CachedFastParser(type):
@@ -67,7 +67,7 @@ class CachedFastParser(type):
        pi = cache.parser_cache.get(module_path, None)
        if pi is None or isinstance(pi.parser, parsing.Parser):
            p = super(CachedFastParser, self).__call__(source, module_path,
                                                       user_position)
        else:
            p = pi.parser  # pi is a `cache.ParserCacheItem`
            p.update(source, user_position)
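`CachedFastParser` is a metaclass whose `__call__` either builds a new parser via `super().__call__` or reuses and updates a cached one. A generic, self-contained sketch of that caching-metaclass pattern (the cache key, `_cache` dict, and class names below are stand-ins, not jedi's own implementation):

class CachedMeta(type):
    """Reuse an existing instance for the same key instead of constructing a new one."""
    _cache = {}

    def __call__(cls, key, *args, **kwargs):
        instance = cls._cache.get(key)
        if instance is None:
            # No cached instance yet: construct one the normal way.
            instance = super(CachedMeta, cls).__call__(key, *args, **kwargs)
            cls._cache[key] = instance
        return instance


class Parser(metaclass=CachedMeta):
    def __init__(self, key):
        self.key = key


assert Parser('module.py') is Parser('module.py')  # the second call hits the cache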
@@ -204,7 +204,7 @@ class FastParser(use_metaclass(CachedFastParser)):
                self._user_scope = p.user_scope
        if isinstance(self._user_scope, pr.SubModule) \
                or self._user_scope is None:
            self._user_scope = self.module
        return self._user_scope
@@ -324,11 +324,11 @@ class FastParser(use_metaclass(CachedFastParser)):
                if self.current_node is not None:
                    self.current_node = \
                        self.current_node.parent_until_indent(indent)
                    nodes += self.current_node.old_children

                # check if code_part has already been parsed
-                #print '#'*45,line_offset, p and p.end_pos, '\n', code_part
+                # print '#'*45,line_offset, p and p.end_pos, '\n', code_part
                p, node = self._get_parser(code_part, code[start:],
                                           line_offset, nodes, not is_first)
@@ -351,12 +351,12 @@ class FastParser(use_metaclass(CachedFastParser)):
                else:
                    if node is None:
                        self.current_node = \
                            self.current_node.add_parser(p, code_part)
                    else:
                        self.current_node = self.current_node.add_node(node)

                if self.current_node.parent and (isinstance(p.user_scope,
                        pr.SubModule) or p.user_scope is None) \
                        and self.user_position \
                        and p.start_pos <= self.user_position < p.end_pos:
                    p.user_scope = self.current_node.parent.content_scope
@@ -365,7 +365,7 @@ class FastParser(use_metaclass(CachedFastParser)):
                is_first = False
            else:
-                #print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
+                # print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
                pass

            line_offset += lines
@@ -378,7 +378,7 @@ class FastParser(use_metaclass(CachedFastParser)):
            self.module.end_pos = self.parsers[-1].end_pos

-        #print(self.parsers[0].module.get_code())
+        # print(self.parsers[0].module.get_code())
        del code

    def _get_parser(self, code, parser_code, line_offset, nodes, no_docstr):
@@ -408,7 +408,7 @@ class FastParser(use_metaclass(CachedFastParser)):
                # It's important to take care of the whole user
                # positioning stuff, if no reparsing is being done.
                p.user_stmt = m.get_statement_for_position(
                    self.user_position, include_imports=True)
                if p.user_stmt:
                    p.user_scope = p.user_stmt.parent
        else:

View File

@@ -56,7 +56,7 @@ class ImportPath(pr.Base):
    GlobalNamespace = _GlobalNamespace()

    def __init__(self, import_stmt, is_like_search=False, kill_count=0,
                 direct_resolve=False):
        self.import_stmt = import_stmt
        self.is_like_search = is_like_search
        self.direct_resolve = direct_resolve
@@ -88,8 +88,8 @@ class ImportPath(pr.Base):
            import foo.bar
        """
        return not self.import_stmt.alias and not self.import_stmt.from_ns \
            and len(self.import_stmt.namespace.names) > 1 \
            and not self.direct_resolve

    def get_nested_import(self, parent):
        """
@@ -127,14 +127,14 @@ class ImportPath(pr.Base):
                    names += m.parser.module.get_defined_names()
            else:
                if on_import_stmt and isinstance(scope, pr.Module) \
                        and scope.path.endswith('__init__.py'):
                    pkg_path = os.path.dirname(scope.path)
                    names += self.get_module_names([pkg_path])
                for s, scope_names in evaluate.get_names_of_scope(scope,
                                                                  include_builtin=False):
                    for n in scope_names:
                        if self.import_stmt.from_ns is None \
                                or self.is_partial_import:
                            # from_ns must be defined to access module
                            # values plus a partial import means that there
                            # is something after the import, which
@@ -155,7 +155,7 @@ class ImportPath(pr.Base):
        for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
            inf_pos = (float('inf'), float('inf'))
            names.append(pr.Name(self.GlobalNamespace, [(name, inf_pos)],
                                 inf_pos, inf_pos, self.import_stmt))
        return names

    def sys_path_with_modifications(self):
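The loop above builds module-name candidates from `pkgutil.iter_modules`, which yields one `(module_loader, name, is_pkg)` tuple per importable module on the given search path. A standalone look at the same call (passing `None` scans `sys.path`):

import pkgutil

# List the importable top-level modules on the default sys.path.
for module_loader, name, is_pkg in pkgutil.iter_modules(None):
    print(name, '(package)' if is_pkg else '(module)')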
@@ -198,12 +198,12 @@ class ImportPath(pr.Base):
            elif rest:
                if is_goto:
                    scopes = itertools.chain.from_iterable(
                        evaluate.find_name(s, rest[0], is_goto=True)
                        for s in scopes)
                else:
                    scopes = itertools.chain.from_iterable(
                        evaluate.follow_path(iter(rest), s, s)
                        for s in scopes)
                scopes = list(scopes)

            if self.is_nested_import():
@@ -273,7 +273,7 @@ class ImportPath(pr.Base):
                current_namespace = follow_str(current_namespace[1], s)
            except ImportError:
                if self.import_stmt.relative_count \
                        and len(self.import_path) == 1:
                    # follow `from . import some_variable`
                    rel_path = self.get_relative_path()
                    with common.ignored(ImportError):

View File

@@ -70,7 +70,7 @@ class ObjectImporter(object):
            if objclass and module:
                alias = self._genname()
                fakeimport = self.make_fakeimport(module, objclass.__name__,
                                                  alias)
                fakestmt = self.make_fakestatement(variable, alias, call=True)
                scope.add_import(fakeimport)
                scope.add_statement(fakestmt)

View File

@@ -45,7 +45,7 @@ class CachedModule(object):
""" get the parser lazy """ """ get the parser lazy """
if self._parser is None: if self._parser is None:
self._parser = cache.load_module(self.path, self.name) \ self._parser = cache.load_module(self.path, self.name) \
or self._load_module() or self._load_module()
return self._parser return self._parser
def _get_source(self): def _get_source(self):
@@ -113,10 +113,10 @@ class ModuleWithCursor(Module):
# Also, the position is here important (which will not be used by # Also, the position is here important (which will not be used by
# default), therefore fill the cache here. # default), therefore fill the cache here.
self._parser = fast_parser.FastParser(self.source, self.path, self._parser = fast_parser.FastParser(self.source, self.path,
self.position) self.position)
# don't pickle that module, because it's changing fast # don't pickle that module, because it's changing fast
cache.save_module(self.path, self.name, self._parser, cache.save_module(self.path, self.name, self._parser,
pickling=False) pickling=False)
return self._parser return self._parser
def get_path_until_cursor(self): def get_path_until_cursor(self):
@@ -136,7 +136,7 @@ class ModuleWithCursor(Module):
self._line_length = len(line) self._line_length = len(line)
line = line + '\n' line = line + '\n'
# add lines with a backslash at the end # add lines with a backslash at the end
while 1: while True:
self._line_temp -= 1 self._line_temp -= 1
last_line = self.get_line(self._line_temp) last_line = self.get_line(self._line_temp)
if last_line and last_line[-1] == '\\': if last_line and last_line[-1] == '\\':
@@ -163,7 +163,7 @@ class ModuleWithCursor(Module):
last_type = None last_type = None
try: try:
for token_type, tok, start, end, line in gen: for token_type, tok, start, end, line in gen:
#print 'tok', token_type, tok, force_point # print 'tok', token_type, tok, force_point
if last_type == token_type == tokenize.NAME: if last_type == token_type == tokenize.NAME:
string += ' ' string += ' '
@@ -214,7 +214,7 @@ class ModuleWithCursor(Module):
after = re.match("[^\w\s]+", line[self.position[1]:]) after = re.match("[^\w\s]+", line[self.position[1]:])
before = re.match("[^\w\s]+", line[:self.position[1]][::-1]) before = re.match("[^\w\s]+", line[:self.position[1]][::-1])
return (before.group(0) if before is not None else '') \ return (before.group(0) if before is not None else '') \
+ (after.group(0) if after is not None else '') + (after.group(0) if after is not None else '')
def get_context(self): def get_context(self):
pos = self._start_cursor_pos pos = self._start_cursor_pos
@@ -375,7 +375,7 @@ def source_to_unicode(source, encoding=None):
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', str(source)).group(0) first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', str(source)).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)", possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
first_two_lines) first_two_lines)
if possible_encoding: if possible_encoding:
return possible_encoding.group(1) return possible_encoding.group(1)
else: else:
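The `coding[=:]` regex above detects a PEP 263-style coding cookie, and only the first two lines of the source are inspected. A standalone check of that pattern against a typical file header (the sample source is made up, not from this codebase):

import re

source = "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nprint('hi')\n"
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', source).group(0)
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)", first_two_lines)
print(possible_encoding.group(1))  # prints 'utf-8'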

View File

@@ -46,8 +46,8 @@ class Parser(object):
:param top_module: Use this module as a parent instead of `self.module`. :param top_module: Use this module as a parent instead of `self.module`.
""" """
def __init__(self, source, module_path=None, user_position=None, def __init__(self, source, module_path=None, user_position=None,
no_docstr=False, offset=(0, 0), is_fast_parser=None, no_docstr=False, offset=(0, 0), is_fast_parser=None,
top_module=None): top_module=None):
self.user_position = user_position self.user_position = user_position
self.user_scope = None self.user_scope = None
self.user_stmt = None self.user_stmt = None
@@ -149,7 +149,7 @@ class Parser(object):
append((tok, self.start_pos)) append((tok, self.start_pos))
n = pr.Name(self.module, names, first_pos, self.end_pos) if names \ n = pr.Name(self.module, names, first_pos, self.end_pos) if names \
else None else None
return n, token_type, tok return n, token_type, tok
def _parse_import_list(self): def _parse_import_list(self):
@@ -171,7 +171,7 @@ class Parser(object):
imports = [] imports = []
brackets = False brackets = False
continue_kw = [",", ";", "\n", ')'] \ continue_kw = [",", ";", "\n", ')'] \
+ list(set(keyword.kwlist) - set(['as'])) + list(set(keyword.kwlist) - set(['as']))
while True: while True:
defunct = False defunct = False
token_type, tok = self.next() token_type, tok = self.next()
@@ -207,7 +207,7 @@ class Parser(object):
breaks = [',', ':'] breaks = [',', ':']
while tok not in [')', ':']: while tok not in [')', ':']:
param, tok = self._parse_statement(added_breaks=breaks, param, tok = self._parse_statement(added_breaks=breaks,
stmt_class=pr.Param) stmt_class=pr.Param)
if param and tok == ':': if param and tok == ':':
# parse annotations # parse annotations
annotation, tok = self._parse_statement(added_breaks=breaks) annotation, tok = self._parse_statement(added_breaks=breaks)
@@ -236,7 +236,7 @@ class Parser(object):
return None return None
fname = pr.Name(self.module, [(fname, self.start_pos)], self.start_pos, fname = pr.Name(self.module, [(fname, self.start_pos)], self.start_pos,
self.end_pos) self.end_pos)
token_type, open = self.next() token_type, open = self.next()
if open != '(': if open != '(':
@@ -260,7 +260,7 @@ class Parser(object):
# because of 2 line func param definitions # because of 2 line func param definitions
scope = pr.Function(self.module, fname, params, first_pos, annotation) scope = pr.Function(self.module, fname, params, first_pos, annotation)
if self.user_scope and scope != self.user_scope \ if self.user_scope and scope != self.user_scope \
and self.user_position > first_pos: and self.user_position > first_pos:
self.user_scope = scope self.user_scope = scope
return scope return scope
@@ -276,11 +276,11 @@ class Parser(object):
token_type, cname = self.next() token_type, cname = self.next()
if token_type != tokenize.NAME: if token_type != tokenize.NAME:
debug.warning("class: syntax err, token is not a name@%s (%s: %s)" debug.warning("class: syntax err, token is not a name@%s (%s: %s)"
% (self.start_pos[0], tokenize.tok_name[token_type], cname)) % (self.start_pos[0], tokenize.tok_name[token_type], cname))
return None return None
cname = pr.Name(self.module, [(cname, self.start_pos)], self.start_pos, cname = pr.Name(self.module, [(cname, self.start_pos)], self.start_pos,
self.end_pos) self.end_pos)
super = [] super = []
token_type, _next = self.next() token_type, _next = self.next()
@@ -295,12 +295,12 @@ class Parser(object):
# because of 2 line class initializations # because of 2 line class initializations
scope = pr.Class(self.module, cname, super, first_pos) scope = pr.Class(self.module, cname, super, first_pos)
if self.user_scope and scope != self.user_scope \ if self.user_scope and scope != self.user_scope \
and self.user_position > first_pos: and self.user_position > first_pos:
self.user_scope = scope self.user_scope = scope
return scope return scope
def _parse_statement(self, pre_used_token=None, added_breaks=None, def _parse_statement(self, pre_used_token=None, added_breaks=None,
stmt_class=pr.Statement): stmt_class=pr.Statement):
""" """
Parses statements like:: Parses statements like::
@@ -345,10 +345,10 @@ class Parser(object):
tok_list = [] tok_list = []
while not (tok in always_break while not (tok in always_break
or tok in not_first_break and not tok_list or tok in not_first_break and not tok_list
or tok in breaks and level <= 0): or tok in breaks and level <= 0):
try: try:
#print 'parse_stmt', tok, tokenize.tok_name[token_type] # print 'parse_stmt', tok, tokenize.tok_name[token_type]
tok_list.append(self.current + (self.start_pos,)) tok_list.append(self.current + (self.start_pos,))
if tok == 'as': if tok == 'as':
token_type, tok = self.next() token_type, tok = self.next()
@@ -387,9 +387,9 @@ class Parser(object):
if not tok_list: if not tok_list:
return None, tok return None, tok
#print 'new_stat', set_vars, used_vars # print 'new_stat', set_vars, used_vars
if self.freshscope and not self.no_docstr and len(tok_list) == 1 \ if self.freshscope and not self.no_docstr and len(tok_list) == 1 \
and self.last_token[0] == tokenize.STRING: and self.last_token[0] == tokenize.STRING:
self._scope.add_docstr(self.last_token[1]) self._scope.add_docstr(self.last_token[1])
return None, tok return None, tok
else: else:
@@ -432,7 +432,7 @@ class Parser(object):
s = self._scope s = self._scope
while s is not None: while s is not None:
if isinstance(s, pr.Module) \ if isinstance(s, pr.Module) \
and not isinstance(s, pr.SubModule): and not isinstance(s, pr.SubModule):
self.module.end_pos = self.end_pos self.module.end_pos = self.end_pos
break break
s.end_pos = self.end_pos s.end_pos = self.end_pos
@@ -440,10 +440,10 @@ class Parser(object):
raise raise
if self.user_position and (self.start_pos[0] == self.user_position[0] if self.user_position and (self.start_pos[0] == self.user_position[0]
or self.user_scope is None or self.user_scope is None
and self.start_pos[0] >= self.user_position[0]): and self.start_pos[0] >= self.user_position[0]):
debug.dbg('user scope found [%s] = %s' % debug.dbg('user scope found [%s] = %s' %
(self.parserline.replace('\n', ''), repr(self._scope))) (self.parserline.replace('\n', ''), repr(self._scope)))
self.user_scope = self._scope self.user_scope = self._scope
self.last_token = self.current self.last_token = self.current
self.current = (typ, tok) self.current = (typ, tok)
@@ -469,7 +469,7 @@ class Parser(object):
# This iterator stuff is not intentional. It grew historically. # This iterator stuff is not intentional. It grew historically.
for token_type, tok in self.iterator: for token_type, tok in self.iterator:
self.module.temp_used_names = [] self.module.temp_used_names = []
#debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\ # debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
# % (tok, tokenize.tok_name[token_type], start_position[0])) # % (tok, tokenize.tok_name[token_type], start_position[0]))
while token_type == tokenize.DEDENT and self._scope != self.module: while token_type == tokenize.DEDENT and self._scope != self.module:
@@ -494,13 +494,13 @@ class Parser(object):
self._scope = self.module self._scope = self.module
use_as_parent_scope = self.top_module if isinstance(self._scope, use_as_parent_scope = self.top_module if isinstance(self._scope,
pr.SubModule) else self._scope pr.SubModule) else self._scope
first_pos = self.start_pos first_pos = self.start_pos
if tok == 'def': if tok == 'def':
func = self._parse_function() func = self._parse_function()
if func is None: if func is None:
debug.warning("function: syntax error@%s" % debug.warning("function: syntax error@%s" %
self.start_pos[0]) self.start_pos[0])
continue continue
self.freshscope = True self.freshscope = True
self._scope = self._scope.add_scope(func, self._decorators) self._scope = self._scope.add_scope(func, self._decorators)
@@ -520,19 +520,19 @@ class Parser(object):
e = (alias or m or self).end_pos e = (alias or m or self).end_pos
end_pos = self.end_pos if count + 1 == len(imports) else e end_pos = self.end_pos if count + 1 == len(imports) else e
i = pr.Import(self.module, first_pos, end_pos, m, i = pr.Import(self.module, first_pos, end_pos, m,
alias, defunct=defunct) alias, defunct=defunct)
self._check_user_stmt(i) self._check_user_stmt(i)
self._scope.add_import(i) self._scope.add_import(i)
if not imports: if not imports:
i = pr.Import(self.module, first_pos, self.end_pos, None, i = pr.Import(self.module, first_pos, self.end_pos, None,
defunct=True) defunct=True)
self._check_user_stmt(i) self._check_user_stmt(i)
self.freshscope = False self.freshscope = False
elif tok == 'from': elif tok == 'from':
defunct = False defunct = False
# take care for relative imports # take care for relative imports
relative_count = 0 relative_count = 0
while 1: while True:
token_type, tok = self.next() token_type, tok = self.next()
if tok != '.': if tok != '.':
break break
@@ -556,12 +556,12 @@ class Parser(object):
e = (alias or name or self).end_pos e = (alias or name or self).end_pos
end_pos = self.end_pos if count + 1 == len(names) else e end_pos = self.end_pos if count + 1 == len(names) else e
i = pr.Import(self.module, first_pos, end_pos, name, i = pr.Import(self.module, first_pos, end_pos, name,
alias, mod, star, relative_count, alias, mod, star, relative_count,
defunct=defunct or defunct2) defunct=defunct or defunct2)
self._check_user_stmt(i) self._check_user_stmt(i)
self._scope.add_import(i) self._scope.add_import(i)
self.freshscope = False self.freshscope = False
#loops # loops
elif tok == 'for': elif tok == 'for':
set_stmt, tok = self._parse_statement(added_breaks=['in']) set_stmt, tok = self._parse_statement(added_breaks=['in'])
if tok == 'in': if tok == 'in':
@@ -572,14 +572,14 @@ class Parser(object):
self._scope = self._scope.add_statement(f) self._scope = self._scope.add_statement(f)
else: else:
debug.warning('syntax err, for flow started @%s', debug.warning('syntax err, for flow started @%s',
self.start_pos[0]) self.start_pos[0])
if statement is not None: if statement is not None:
statement.parent = use_as_parent_scope statement.parent = use_as_parent_scope
if set_stmt is not None: if set_stmt is not None:
set_stmt.parent = use_as_parent_scope set_stmt.parent = use_as_parent_scope
else: else:
debug.warning('syntax err, for flow incomplete @%s', debug.warning('syntax err, for flow incomplete @%s',
self.start_pos[0]) self.start_pos[0])
if set_stmt is not None: if set_stmt is not None:
set_stmt.parent = use_as_parent_scope set_stmt.parent = use_as_parent_scope
@@ -592,7 +592,7 @@ class Parser(object):
inputs = [] inputs = []
first = True first = True
while first or command == 'with' \ while first or command == 'with' \
and tok not in [':', '\n']: and tok not in [':', '\n']:
statement, tok = \ statement, tok = \
self._parse_statement(added_breaks=added_breaks) self._parse_statement(added_breaks=added_breaks)
if command == 'except' and tok in added_breaks: if command == 'except' and tok in added_breaks:
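
The loop condition above relies on `and` binding tighter than `or`, i.e. it reads as first or (command == 'with' and tok not in [':', '\n']). A minimal check of that precedence with made-up values:

first, command, tok = False, 'with', ','
assert first or command == 'with' and tok not in [':', '\n']
command = 'if'
assert not (first or command == 'with' and tok not in [':', '\n'])
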
@@ -623,7 +623,7 @@ class Parser(object):
for i in inputs: for i in inputs:
i.parent = use_as_parent_scope i.parent = use_as_parent_scope
debug.warning('syntax err, flow started @%s', debug.warning('syntax err, flow started @%s',
self.start_pos[0]) self.start_pos[0])
# returns # returns
elif tok in ['return', 'yield']: elif tok in ['return', 'yield']:
s = self.start_pos s = self.start_pos
@@ -677,6 +677,6 @@ class Parser(object):
if token_type not in [tokenize.COMMENT, tokenize.INDENT, if token_type not in [tokenize.COMMENT, tokenize.INDENT,
tokenize.NEWLINE, tokenize.NL]: tokenize.NEWLINE, tokenize.NL]:
debug.warning('token not classified', tok, token_type, debug.warning('token not classified', tok, token_type,
self.start_pos[0]) self.start_pos[0])
continue continue
self.no_docstr = False self.no_docstr = False


@@ -91,7 +91,7 @@ class Simple(Base):
@property @property
def start_pos(self): def start_pos(self):
return self._sub_module.line_offset + self._start_pos[0], \ return self._sub_module.line_offset + self._start_pos[0], \
self._start_pos[1] self._start_pos[1]
@start_pos.setter @start_pos.setter
def start_pos(self, value): def start_pos(self, value):
@@ -102,7 +102,7 @@ class Simple(Base):
if None in self._end_pos: if None in self._end_pos:
return self._end_pos return self._end_pos
return self._sub_module.line_offset + self._end_pos[0], \ return self._sub_module.line_offset + self._end_pos[0], \
self._end_pos[1] self._end_pos[1]
@end_pos.setter @end_pos.setter
def end_pos(self, value): def end_pos(self, value):
@@ -110,7 +110,7 @@ class Simple(Base):
@Python3Method @Python3Method
def get_parent_until(self, classes=(), reverse=False, def get_parent_until(self, classes=(), reverse=False,
include_current=True): include_current=True):
""" Takes always the parent, until one class (not a Class) """ """ Takes always the parent, until one class (not a Class) """
if type(classes) not in (tuple, list): if type(classes) not in (tuple, list):
classes = (classes,) classes = (classes,)
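
The single-class-to-tuple normalisation above is part of the usual parent-walking pattern: climb .parent links until a node of one of the requested types is reached. A self-contained sketch with a hypothetical Node class (not jedi's real scope classes):

class Node(object):
    def __init__(self, parent=None):
        self.parent = parent

    def get_parent_until(self, classes=()):
        if type(classes) not in (tuple, list):
            classes = (classes,)  # accept a single class too
        scope = self
        while scope.parent is not None and not isinstance(scope, tuple(classes)):
            scope = scope.parent
        return scope

class Module(Node):
    pass

class Function(Node):
    pass

mod = Module()
func = Function(parent=mod)
name = Node(parent=func)
assert name.get_parent_until(Module) is mod
assert name.get_parent_until((Module, Function)) is func
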
@@ -259,7 +259,7 @@ class Scope(Simple, IsScope):
""" """
return [n for n in self.get_set_vars() return [n for n in self.get_set_vars()
if isinstance(n, Import) or len(n) == 1] if isinstance(n, Import) or len(n) == 1]
def is_empty(self): def is_empty(self):
""" """
@@ -304,7 +304,7 @@ class Scope(Simple, IsScope):
name = self.command name = self.command
return "<%s: %s@%s-%s>" % (type(self).__name__, name, return "<%s: %s@%s-%s>" % (type(self).__name__, name,
self.start_pos[0], self.end_pos[0]) self.start_pos[0], self.end_pos[0])
class Module(IsScope): class Module(IsScope):
@@ -366,12 +366,12 @@ class SubModule(Scope, Module):
else: else:
sep = (re.escape(os.path.sep),) * 2 sep = (re.escape(os.path.sep),) * 2
r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep, r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep,
self.path) self.path)
# remove PEP 3149 names # remove PEP 3149 names
string = re.sub('\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1)) string = re.sub('\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))
names = [(string, (0, 0))] names = [(string, (0, 0))]
self._name = Name(self, names, self.start_pos, self.end_pos, self._name = Name(self, names, self.start_pos, self.end_pos,
self.use_as_parent) self.use_as_parent)
return self._name return self._name
def is_builtin(self): def is_builtin(self):
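
The two regular expressions above recover a module name from its file path and strip a PEP 3149 ABI tag such as '.cpython-33m'. Run in isolation on an invented path:

import os
import re

sep = (re.escape(os.path.sep),) * 2
path = os.path.join('pkg', 'mod.cpython-33m.so')
match = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep, path)
name = re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', match.group(1))
print(name)  # 'mod'
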
@@ -532,7 +532,7 @@ class Lambda(Function):
def __repr__(self): def __repr__(self):
return "<%s @%s (%s-%s)>" % (type(self).__name__, self.start_pos[0], return "<%s @%s (%s-%s)>" % (type(self).__name__, self.start_pos[0],
self.start_pos[1], self.end_pos[1]) self.start_pos[1], self.end_pos[1])
class Flow(Scope): class Flow(Scope):
@@ -641,7 +641,7 @@ class ForFlow(Flow):
def __init__(self, module, inputs, start_pos, set_stmt, def __init__(self, module, inputs, start_pos, set_stmt,
is_list_comp=False): is_list_comp=False):
super(ForFlow, self).__init__(module, 'for', inputs, start_pos, super(ForFlow, self).__init__(module, 'for', inputs, start_pos,
set_stmt.used_vars) set_stmt.used_vars)
self.set_stmt = set_stmt self.set_stmt = set_stmt
set_stmt.parent = self.use_as_parent set_stmt.parent = self.use_as_parent
self.is_list_comp = is_list_comp self.is_list_comp = is_list_comp
@@ -814,7 +814,7 @@ class Statement(Simple):
def get_code(self, new_line=True): def get_code(self, new_line=True):
def assemble(command_list, assignment=None): def assemble(command_list, assignment=None):
pieces = [c.get_code() if isinstance(c, Simple) else unicode(c) pieces = [c.get_code() if isinstance(c, Simple) else unicode(c)
for c in command_list] for c in command_list]
if assignment is None: if assignment is None:
return ''.join(pieces) return ''.join(pieces)
return '%s %s ' % (''.join(pieces), assignment) return '%s %s ' % (''.join(pieces), assignment)
@@ -866,7 +866,7 @@ class Statement(Simple):
""" """
def is_assignment(tok): def is_assignment(tok):
return isinstance(tok, (str, unicode)) and tok.endswith('=') \ return isinstance(tok, (str, unicode)) and tok.endswith('=') \
and not tok in ['>=', '<=', '==', '!='] and not tok in ['>=', '<=', '==', '!=']
def parse_array(token_iterator, array_type, start_pos, add_el=None, def parse_array(token_iterator, array_type, start_pos, add_el=None,
added_breaks=()): added_breaks=()):
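
is_assignment() above classifies an operator token as an assignment when it ends with '=' but is not one of the comparison operators. The same predicate stated on its own (the Python 2 isinstance guard dropped, and `not tok in` written as the idiomatic `tok not in`):

def is_assignment(tok):
    return tok.endswith('=') and tok not in ('>=', '<=', '==', '!=')

assert is_assignment('=') and is_assignment('+=')
assert not is_assignment('==') and not is_assignment('<=')
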
@@ -901,7 +901,7 @@ class Statement(Simple):
c = token_iterator.current[1] c = token_iterator.current[1]
arr.end_pos = c.end_pos if isinstance(c, Simple) \ arr.end_pos = c.end_pos if isinstance(c, Simple) \
else (c[2][0], c[2][1] + len(c[1])) else (c[2][0], c[2][1] + len(c[1]))
return arr, break_tok return arr, break_tok
def parse_stmt(token_iterator, maybe_dict=False, added_breaks=(), def parse_stmt(token_iterator, maybe_dict=False, added_breaks=(),
@@ -939,7 +939,7 @@ class Statement(Simple):
token_list.append(lambd) token_list.append(lambd)
elif tok == 'for': elif tok == 'for':
list_comp, tok = parse_list_comp(token_iterator, list_comp, tok = parse_list_comp(token_iterator,
token_list, start_pos, last_end_pos) token_list, start_pos, last_end_pos)
if list_comp is not None: if list_comp is not None:
token_list = [list_comp] token_list = [list_comp]
@@ -951,8 +951,8 @@ class Statement(Simple):
if level == 0 and tok in closing_brackets \ if level == 0 and tok in closing_brackets \
or tok in added_breaks \ or tok in added_breaks \
or level == 1 and (tok == ',' or level == 1 and (tok == ','
or maybe_dict and tok == ':' or maybe_dict and tok == ':'
or is_assignment(tok) and break_on_assignment): or is_assignment(tok) and break_on_assignment):
end_pos = end_pos[0], end_pos[1] - 1 end_pos = end_pos[0], end_pos[1] - 1
break break
token_list.append(tok_temp) token_list.append(tok_temp)
@@ -961,7 +961,7 @@ class Statement(Simple):
return None, tok return None, tok
statement = stmt_class(self._sub_module, [], [], token_list, statement = stmt_class(self._sub_module, [], [], token_list,
start_pos, end_pos, self.parent) start_pos, end_pos, self.parent)
statement.used_vars = used_vars statement.used_vars = used_vars
return statement, tok return statement, tok
@@ -1078,7 +1078,7 @@ class Statement(Simple):
is_chain = False is_chain = False
elif tok in brackets.keys(): elif tok in brackets.keys():
arr, is_ass = parse_array(token_iterator, brackets[tok], arr, is_ass = parse_array(token_iterator, brackets[tok],
start_pos) start_pos)
if result and isinstance(result[-1], Call): if result and isinstance(result[-1], Call):
result[-1].set_execution(arr) result[-1].set_execution(arr)
else: else:
@@ -1101,10 +1101,10 @@ class Statement(Simple):
end_pos = t.end_pos end_pos = t.end_pos
except AttributeError: except AttributeError:
end_pos = (t[2][0], t[2][1] + len(t[1])) \ end_pos = (t[2][0], t[2][1] + len(t[1])) \
if isinstance(t, tuple) else t.start_pos if isinstance(t, tuple) else t.start_pos
stmt = Statement(self._sub_module, [], [], result, stmt = Statement(self._sub_module, [], [], result,
start_pos, end_pos, self.parent) start_pos, end_pos, self.parent)
stmt._commands = result stmt._commands = result
arr, break_tok = parse_array(token_iterator, Array.TUPLE, arr, break_tok = parse_array(token_iterator, Array.TUPLE,
stmt.start_pos, stmt) stmt.start_pos, stmt)
@@ -1211,7 +1211,7 @@ class Call(Simple):
s = self.name.get_code() s = self.name.get_code()
else: else:
if not is_py3k and isinstance(self.name, str)\ if not is_py3k and isinstance(self.name, str)\
and "'" not in self.name: and "'" not in self.name:
# This is a very rough spot, because of repr not supporting # This is a very rough spot, because of repr not supporting
# unicode signs, see `test_unicode_script`. # unicode signs, see `test_unicode_script`.
s = "'%s'" % unicode(self.name, 'UTF-8') s = "'%s'" % unicode(self.name, 'UTF-8')
@@ -1225,7 +1225,7 @@ class Call(Simple):
def __repr__(self): def __repr__(self):
return "<%s: %s>" % \ return "<%s: %s>" % \
(type(self).__name__, self.name) (type(self).__name__, self.name)
class Array(Call): class Array(Call):
@@ -1356,7 +1356,7 @@ class Name(Simple):
def __init__(self, module, names, start_pos, end_pos, parent=None): def __init__(self, module, names, start_pos, end_pos, parent=None):
super(Name, self).__init__(module, start_pos, end_pos) super(Name, self).__init__(module, start_pos, end_pos)
self.names = tuple(n if isinstance(n, NamePart) else self.names = tuple(n if isinstance(n, NamePart) else
NamePart(n[0], self, n[1]) for n in names) NamePart(n[0], self, n[1]) for n in names)
if parent is not None: if parent is not None:
self.parent = parent self.parent = parent
@@ -1399,7 +1399,7 @@ class ListComprehension(Base):
def __repr__(self): def __repr__(self):
return "<%s: %s>" % \ return "<%s: %s>" % \
(type(self).__name__, self.get_code()) (type(self).__name__, self.get_code())
def get_code(self): def get_code(self):
statements = self.stmt, self.middle, self.input statements = self.stmt, self.middle, self.input


@@ -24,7 +24,7 @@ class RecursionDecorator(object):
self.reset() self.reset()
def __call__(self, stmt, *args, **kwargs): def __call__(self, stmt, *args, **kwargs):
#print stmt, len(self.node_statements()) # print stmt, len(self.node_statements())
if self.push_stmt(stmt): if self.push_stmt(stmt):
return [] return []
else: else:
@@ -37,7 +37,7 @@ class RecursionDecorator(object):
check = self._check_recursion() check = self._check_recursion()
if check: # TODO remove False!!!! if check: # TODO remove False!!!!
debug.warning('catched stmt recursion: %s against %s @%s' debug.warning('catched stmt recursion: %s against %s @%s'
% (stmt, check.stmt, stmt.start_pos)) % (stmt, check.stmt, stmt.start_pos))
self.pop_stmt() self.pop_stmt()
return True return True
return False return False
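
The decorator above guards statement evaluation with a push/pop stack and gives up with an empty result when a statement re-enters itself. A stripped-down sketch of that pattern (class and method names invented, not jedi's implementation):

class RecursionGuard(object):
    def __init__(self):
        self._stack = []

    def push_stmt(self, stmt):
        # True means evaluating `stmt` again would recurse; the caller
        # should return an empty result instead of descending further.
        if stmt in self._stack:
            return True
        self._stack.append(stmt)
        return False

    def pop_stmt(self):
        self._stack.pop()

guard = RecursionGuard()
assert guard.push_stmt('some_stmt') is False   # first visit is fine
assert guard.push_stmt('some_stmt') is True    # re-entered: recursion detected
guard.pop_stmt()
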
@@ -82,7 +82,7 @@ class RecursionNode(object):
# The same's true for the builtins, because the builtins are really # The same's true for the builtins, because the builtins are really
# simple. # simple.
self.is_ignored = isinstance(stmt, pr.Param) \ self.is_ignored = isinstance(stmt, pr.Param) \
or (self.script == builtin.Builtin.scope) or (self.script == builtin.Builtin.scope)
def __eq__(self, other): def __eq__(self, other):
if not other: if not other:
@@ -90,10 +90,10 @@ class RecursionNode(object):
is_list_comp = lambda x: isinstance(x, pr.ForFlow) and x.is_list_comp is_list_comp = lambda x: isinstance(x, pr.ForFlow) and x.is_list_comp
return self.script == other.script \ return self.script == other.script \
and self.position == other.position \ and self.position == other.position \
and not is_list_comp(self.stmt.parent) \ and not is_list_comp(self.stmt.parent) \
and not is_list_comp(other.parent) \ and not is_list_comp(other.parent) \
and not self.is_ignored and not other.is_ignored and not self.is_ignored and not other.is_ignored
class ExecutionRecursionDecorator(object): class ExecutionRecursionDecorator(object):
@@ -107,7 +107,7 @@ class ExecutionRecursionDecorator(object):
def __call__(self, execution, evaluate_generator=False): def __call__(self, execution, evaluate_generator=False):
debug.dbg('Execution recursions: %s' % execution, self.recursion_level, debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
self.execution_count, len(self.execution_funcs)) self.execution_count, len(self.execution_funcs))
if self.check_recursion(execution, evaluate_generator): if self.check_recursion(execution, evaluate_generator):
result = [] result = []
else: else:


@@ -65,7 +65,7 @@ def rename(script, new_name):
def _rename(names, replace_str): def _rename(names, replace_str):
""" For both rename and inline. """ """ For both rename and inline. """
order = sorted(names, key=lambda x: (x.module_path, x.start_pos), order = sorted(names, key=lambda x: (x.module_path, x.start_pos),
reverse=True) reverse=True)
def process(path, old_lines, new_lines): def process(path, old_lines, new_lines):
if new_lines is not None: # goto next file, save last if new_lines is not None: # goto next file, save last
@@ -92,7 +92,7 @@ def _rename(names, replace_str):
nr, indent = name.start_pos nr, indent = name.start_pos
line = new_lines[nr - 1] line = new_lines[nr - 1]
new_lines[nr - 1] = line[:indent] + replace_str + \ new_lines[nr - 1] = line[:indent] + replace_str + \
line[indent + len(name.text):] line[indent + len(name.text):]
process(current_path, old_lines, new_lines) process(current_path, old_lines, new_lines)
return dct return dct
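
The splice above rewrites one occurrence in place: everything before the name's column, then the replacement, then everything after the old name. The same operation on invented data (1-based line numbers, 0-based columns, as in the code above):

lines = ["def foo():", "    return foo"]
line_nr, column, old, new = 2, 11, "foo", "bar"

line = lines[line_nr - 1]
lines[line_nr - 1] = line[:column] + new + line[column + len(old):]
print(lines[1])  # '    return bar'
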
@@ -148,7 +148,7 @@ def extract(script, new_name):
open_brackets = ['(', '[', '{'] open_brackets = ['(', '[', '{']
close_brackets = [')', ']', '}'] close_brackets = [')', ']', '}']
if '\n' in text and not (text[0] in open_brackets and text[-1] == if '\n' in text and not (text[0] in open_brackets and text[-1] ==
close_brackets[open_brackets.index(text[0])]): close_brackets[open_brackets.index(text[0])]):
text = '(%s)' % text text = '(%s)' % text
# add new line before statement # add new line before statement
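
The bracket check above wraps a multi-line extracted expression in parentheses unless it is already enclosed by a matching bracket pair. The same logic run on an invented snippet:

open_brackets = ['(', '[', '{']
close_brackets = [')', ']', '}']
text = "1 +\n2"
if '\n' in text and not (text[0] in open_brackets and text[-1] ==
                         close_brackets[open_brackets.index(text[0])]):
    text = '(%s)' % text
print(text)  # prints '(1 +' and '2)' on two lines
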
@@ -173,9 +173,9 @@ def inline(script):
stmt = definitions[0]._definition stmt = definitions[0]._definition
usages = script.usages() usages = script.usages()
inlines = [r for r in usages inlines = [r for r in usages
if not stmt.start_pos <= r.start_pos <= stmt.end_pos] if not stmt.start_pos <= r.start_pos <= stmt.end_pos]
inlines = sorted(inlines, key=lambda x: (x.module_path, x.start_pos), inlines = sorted(inlines, key=lambda x: (x.module_path, x.start_pos),
reverse=True) reverse=True)
commands = stmt.get_commands() commands = stmt.get_commands()
# don't allow multiline refactorings for now. # don't allow multiline refactorings for now.
assert stmt.start_pos[0] == stmt.end_pos[0] assert stmt.start_pos[0] == stmt.end_pos[0]
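
inlines is sorted in reverse (module path, position) order so that rewriting the later occurrences first leaves the positions of the earlier, not-yet-rewritten ones valid. Invented positions to show the effect of that ordering:

usages = [(1, 4), (3, 8), (2, 0)]          # (line, column) pairs, made up
for line, column in sorted(usages, reverse=True):
    print('rewrite occurrence at line %d, column %d' % (line, column))
# line 3 first, then 2, then 1 -- earlier positions stay untouched
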


@@ -168,12 +168,12 @@ def generate_tokens(readline):
if endmatch: if endmatch:
pos = end = endmatch.end(0) pos = end = endmatch.end(0)
yield TokenInfo(STRING, contstr + line[:end], yield TokenInfo(STRING, contstr + line[:end],
strstart, (lnum, end), contline + line) strstart, (lnum, end), contline + line)
contstr, needcont = '', 0 contstr, needcont = '', 0
contline = None contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield TokenInfo(ERRORTOKEN, contstr + line, yield TokenInfo(ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline) strstart, (lnum, len(line)), contline)
contstr = '' contstr = ''
contline = None contline = None
continue continue
@@ -204,12 +204,13 @@ def generate_tokens(readline):
comment_token = line[pos:].rstrip('\r\n') comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token) nl_pos = pos + len(comment_token)
yield TokenInfo(COMMENT, comment_token, yield TokenInfo(COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line) (lnum, pos), (lnum, pos + len(comment_token)), line)
yield TokenInfo(NL, line[nl_pos:], yield TokenInfo(NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line) (lnum, nl_pos), (lnum, len(line)), line)
else: else:
yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:], yield TokenInfo(
(lnum, pos), (lnum, len(line)), line) (NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue continue
if column > indents[-1]: # count indents or dedents if column > indents[-1]: # count indents or dedents
@@ -237,7 +238,7 @@ def generate_tokens(readline):
yield TokenInfo(NUMBER, token, spos, epos, line) yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n': elif initial in '\r\n':
yield TokenInfo(NL if parenlev > 0 else NEWLINE, yield TokenInfo(NL if parenlev > 0 else NEWLINE,
token, spos, epos, line) token, spos, epos, line)
elif initial == '#': elif initial == '#':
assert not token.endswith("\n") assert not token.endswith("\n")
yield TokenInfo(COMMENT, token, spos, epos, line) yield TokenInfo(COMMENT, token, spos, epos, line)
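
The parenlev check above keeps the usual tokenizer distinction: a line break inside open brackets is NL, a break that ends a logical line is NEWLINE. The standard library tokenizer (Python 3, not jedi's bundled copy) shows the same behaviour:

import io
import tokenize

source = "x = (1 +\n     2)\n"
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.type in (tokenize.NL, tokenize.NEWLINE):
        print(tokenize.tok_name[tok.type])
# NL for the break inside the parentheses, NEWLINE at the end of the statement
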
@@ -277,7 +278,7 @@ def generate_tokens(readline):
yield TokenInfo(OP, token, spos, epos, line) yield TokenInfo(OP, token, spos, epos, line)
else: else:
yield TokenInfo(ERRORTOKEN, line[pos], yield TokenInfo(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos + 1), line) (lnum, pos), (lnum, pos + 1), line)
pos += 1 pos += 1
for indent in indents[1:]: # pop remaining indent levels for indent in indents[1:]: # pop remaining indent levels


@@ -7,7 +7,7 @@ The twisted equivalent of this module is ``twisted.trial._synctest``.
""" """
from __future__ import absolute_import from __future__ import absolute_import
import unittest # this is stdlib unittest, but jedi gets the local one import unittest
class Assertions(unittest.TestCase): class Assertions(unittest.TestCase):