From 86d775324bc3a7dd1c574d3e5968ba50492c076b Mon Sep 17 00:00:00 2001
From: Danilo Bargen
Date: Sat, 27 Apr 2013 15:40:36 +0200
Subject: [PATCH] Fix most flake8 issues (fixes #205)

---
 jedi/_compatibility.py         |  3 +++
 jedi/api.py                    |  2 +-
 jedi/api_classes.py            | 27 ++++++++++++++-------------
 jedi/debug.py                  |  1 +
 jedi/dynamic.py                |  4 ++--
 jedi/fast_parser.py            | 28 +++++++++++++++-------------
 jedi/imports.py                |  2 +-
 jedi/parsing.py                | 10 +++++-----
 jedi/parsing_representation.py | 13 +++++++------
 jedi/recursion.py              |  2 +-
 jedi/tokenizer.py              | 35 +++++++++++++++++++++++-----------
 11 files changed, 75 insertions(+), 52 deletions(-)

diff --git a/jedi/_compatibility.py b/jedi/_compatibility.py
index 29aef154..73c7f865 100644
--- a/jedi/_compatibility.py
+++ b/jedi/_compatibility.py
@@ -18,6 +18,7 @@ is_py3k = sys.hexversion >= 0x03000000
 is_py33 = sys.hexversion >= 0x03030000
 is_py25 = sys.hexversion < 0x02060000
 
+
 def find_module_py33(string, path=None):
     mod_info = (None, None, None)
     loader = None
@@ -48,6 +49,7 @@ def find_module_py33(string, path=None):
 
     return mod_info
 
+
 def find_module_pre_py33(string, path=None):
     mod_info = None
     if path is None:
@@ -57,6 +59,7 @@ def find_module_pre_py33(string, path=None):
 
     return (mod_info[0], mod_info[1], mod_info[2][2] == imp.PKG_DIRECTORY)
 
+
 def find_module(string, path=None):
     """Provides information about a module.
 
diff --git a/jedi/api.py b/jedi/api.py
index af94e287..1ed57f6b 100644
--- a/jedi/api.py
+++ b/jedi/api.py
@@ -133,7 +133,7 @@ class Script(object):
             bs = builtin.Builtin.scope
             if isinstance(u, pr.Import):
                 if (u.relative_count > 0 or u.from_ns) and not re.search(
-                    r'(,|from)\s*$|import\s+$', completion_line):
+                        r'(,|from)\s*$|import\s+$', completion_line):
                     completions += ((k, bs) for k
                                     in keywords.get_keywords('import'))
 
diff --git a/jedi/api_classes.py b/jedi/api_classes.py
index 77d4a489..cbb64f7a 100644
--- a/jedi/api_classes.py
+++ b/jedi/api_classes.py
@@ -50,19 +50,20 @@ def _clear_caches_after_call(func):
 
 
 class BaseDefinition(object):
-    _mapping = {'posixpath': 'os.path',
-                'riscospath': 'os.path',
-                'ntpath': 'os.path',
-                'os2emxpath': 'os.path',
-                'macpath': 'os.path',
-                'genericpath': 'os.path',
-                'posix': 'os',
-                '_io': 'io',
-                '_functools': 'functools',
-                '_sqlite3': 'sqlite3',
-                '__builtin__': '',
-                'builtins': '',
-                }
+    _mapping = {
+        'posixpath': 'os.path',
+        'riscospath': 'os.path',
+        'ntpath': 'os.path',
+        'os2emxpath': 'os.path',
+        'macpath': 'os.path',
+        'genericpath': 'os.path',
+        'posix': 'os',
+        '_io': 'io',
+        '_functools': 'functools',
+        '_sqlite3': 'sqlite3',
+        '__builtin__': '',
+        'builtins': '',
+    }
 
     _tuple_mapping = dict((tuple(k.split('.')), v) for (k, v) in {
         'argparse._ActionsContainer': 'argparse.ArgumentParser',
diff --git a/jedi/debug.py b/jedi/debug.py
index 321ffa87..1f08dcca 100644
--- a/jedi/debug.py
+++ b/jedi/debug.py
@@ -46,6 +46,7 @@ def warning(*args):
 
 
 def speed(name):
     if debug_function and enable_speed:
+        global start_time
         now = time.time()
         debug_function(SPEED, 'speed: ' + '%s %s' % (name, now - start_time))
diff --git a/jedi/dynamic.py b/jedi/dynamic.py
index bd87eeb2..fb96c295 100644
--- a/jedi/dynamic.py
+++ b/jedi/dynamic.py
@@ -248,7 +248,7 @@ def _scan_statement(stmt, search_name, assignment_details=False):
     check = list(stmt.get_commands())
     if assignment_details:
         for commands, op in stmt.assignment_details:
-            check += commands 
+            check += commands
 
     result = []
     for c in check:
@@ -529,7 +529,7 @@ def check_statement_information(stmt, search_name):
         # isinstance check
         isinst = call.execution.values
         assert len(isinst) == 2  # has two params
-        obj, classes = [stmt.get_commands() for stmt in isinst]
+        obj, classes = [statement.get_commands() for statement in isinst]
         assert len(obj) == 1
         assert len(classes) == 1
         assert isinstance(obj[0], pr.Call)
diff --git a/jedi/fast_parser.py b/jedi/fast_parser.py
index 4883e9c8..1923a76e 100644
--- a/jedi/fast_parser.py
+++ b/jedi/fast_parser.py
@@ -41,19 +41,21 @@ class Module(pr.Simple, pr.Module):
         return self.cache[key]
 
     def __getattr__(self, name):
-        operators = {'get_imports': operator.add,
-                     'get_code': operator.add,
-                     'get_set_vars': operator.add,
-                     'get_defined_names': operator.add,
-                     'is_empty': operator.and_
-                     }
-        properties = {'subscopes': operator.add,
-                      'imports': operator.add,
-                      'statements': operator.add,
-                      'imports': operator.add,
-                      'asserts': operator.add,
-                      'global_vars': operator.add
-                      }
+        operators = {
+            'get_imports': operator.add,
+            'get_code': operator.add,
+            'get_set_vars': operator.add,
+            'get_defined_names': operator.add,
+            'is_empty': operator.and_
+        }
+        properties = {
+            'subscopes': operator.add,
+            'imports': operator.add,
+            'statements': operator.add,
+            'imports': operator.add,
+            'asserts': operator.add,
+            'global_vars': operator.add
+        }
         if name in operators:
             return lambda *args, **kwargs: self._get(name, operators[name],
                                                      True, *args, **kwargs)
diff --git a/jedi/imports.py b/jedi/imports.py
index d6081386..81669ba1 100644
--- a/jedi/imports.py
+++ b/jedi/imports.py
@@ -249,7 +249,7 @@ class ImportPath(pr.Base):
                 sys.path = temp
                 raise
             sys.path = temp
-            
+
             return importing
 
         if self.file_path:
diff --git a/jedi/parsing.py b/jedi/parsing.py
index e1785c01..cbf32667 100644
--- a/jedi/parsing.py
+++ b/jedi/parsing.py
@@ -401,10 +401,10 @@ class Parser(object):
             with common.ignored(IndexError, AttributeError):
                 # If string literal is being parsed
                 first_tok = stmt.token_list[0]
-                if (not stmt.set_vars and
-                        not stmt.used_vars and
-                        len(stmt.token_list) == 1 and
-                        first_tok[0] == tokenize.STRING):
+                if (not stmt.set_vars
+                        and not stmt.used_vars
+                        and len(stmt.token_list) == 1
+                        and first_tok[0] == tokenize.STRING):
                     # ... then set it as a docstring
                     self.scope.statements[-1].add_docstr(first_tok[1])
 
@@ -434,7 +434,7 @@ class Parser(object):
             if self.user_position and (self.start_pos[0] == self.user_position[0]
                     or self.user_scope is None
                    and self.start_pos[0] >= self.user_position[0]):
-                debug.dbg('user scope found [%s] = %s' % \
+                debug.dbg('user scope found [%s] = %s' %
                           (self.parserline.replace('\n', ''), repr(self.scope)))
                 self.user_scope = self.scope
             self.last_token = self.current
diff --git a/jedi/parsing_representation.py b/jedi/parsing_representation.py
index a7dc9545..562baa67 100644
--- a/jedi/parsing_representation.py
+++ b/jedi/parsing_representation.py
@@ -1272,12 +1272,13 @@ class Array(Call):
         return zip(self.keys, self.values)
 
     def get_code(self):
-        map = {self.NOARRAY: '(%s)',
-               self.TUPLE: '(%s)',
-               self.LIST: '[%s]',
-               self.DICT: '{%s}',
-               self.SET: '{%s}'
-               }
+        map = {
+            self.NOARRAY: '(%s)',
+            self.TUPLE: '(%s)',
+            self.LIST: '[%s]',
+            self.DICT: '{%s}',
+            self.SET: '{%s}'
+        }
         inner = []
         for i, stmt in enumerate(self.values):
             s = ''
diff --git a/jedi/recursion.py b/jedi/recursion.py
index 1727897a..7346fecf 100644
--- a/jedi/recursion.py
+++ b/jedi/recursion.py
@@ -35,7 +35,7 @@ class RecursionDecorator(object):
     def push_stmt(self, stmt):
         self.current = RecursionNode(stmt, self.current)
         check = self._check_recursion()
-        if check:# TODO remove False!!!!
+        if check:  # TODO remove False!!!!
             debug.warning('catched stmt recursion: %s against %s @%s'
                           % (stmt, check.stmt, stmt.start_pos))
             self.pop_stmt()
diff --git a/jedi/tokenizer.py b/jedi/tokenizer.py
index e4b5c0d6..5cc2ce65 100644
--- a/jedi/tokenizer.py
+++ b/jedi/tokenizer.py
@@ -26,15 +26,25 @@ ENCODING = N_TOKENS + 2
 tok_name[ENCODING] = 'ENCODING'
 N_TOKENS += 3
 
+
 class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
     def __repr__(self):
         annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
         return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
                 self._replace(type=annotated_type))
 
-def group(*choices): return '(' + '|'.join(choices) + ')'
-def any(*choices): return group(*choices) + '*'
-def maybe(*choices): return group(*choices) + '?'
+
+def group(*choices):
+    return '(' + '|'.join(choices) + ')'
+
+
+def any(*choices):
+    return group(*choices) + '*'
+
+
+def maybe(*choices):
+    return group(*choices) + '?'
+
 
 # Note: we use unicode matching for names ("\w") but ascii matching for
 # number literals.
@@ -91,9 +101,11 @@ ContStr = group(r"[bB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
 PseudoExtras = group(r'\\\r?\n', Comment, Triple)
 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
 
+
 def _compile(expr):
     return re.compile(expr, re.UNICODE)
 
+
 tokenprog, pseudoprog, single3prog, double3prog = map(
     _compile, (Token, PseudoToken, Single3, Double3))
 endprogs = {"'": _compile(Single), '"': _compile(Double),
@@ -120,14 +132,16 @@ for t in ("'", '"',
           "r'", 'r"', "R'", 'R"',
           "b'", 'b"', "B'", 'B"',
           "br'", 'br"', "Br'", 'Br"',
-          "bR'", 'bR"', "BR'", 'BR"' ):
+          "bR'", 'bR"', "BR'", 'BR"'):
     single_quoted[t] = t
 
 del _compile
 
 tabsize = 8
 
-class TokenError(Exception): pass
+
+class TokenError(Exception):
+    pass
 
 
 def generate_tokens(readline):
@@ -169,13 +183,14 @@ def generate_tokens(readline):
                 continue
 
         elif parenlev == 0 and not continued:  # new statement
-            if not line: break
+            if not line:
+                break
             column = 0
             while pos < max:                   # measure leading whitespace
                 if line[pos] == ' ':
                     column += 1
                 elif line[pos] == '\t':
-                    column = (column//tabsize + 1)*tabsize
+                    column = (column // tabsize + 1) * tabsize
                 elif line[pos] == '\f':
                     column = 0
                 else:
@@ -218,7 +233,7 @@ def generate_tokens(readline):
             token, initial = line[start:end], line[start]
 
             if (initial in numchars or   # ordinary number
-                (initial == '.' and token != '.' and token != '...')):
+                    (initial == '.' and token != '.' and token != '...')):
                 yield TokenInfo(NUMBER, token, spos, epos, line)
             elif initial in '\r\n':
                 yield TokenInfo(NL if parenlev > 0 else NEWLINE,
@@ -239,8 +254,8 @@ def generate_tokens(readline):
                     contline = line
                     break
                 elif initial in single_quoted or \
-                    token[:2] in single_quoted or \
-                    token[:3] in single_quoted:
+                        token[:2] in single_quoted or \
+                        token[:3] in single_quoted:
                     if token[-1] == '\n':            # continued string
                         strstart = (lnum, start)
                         endprog = (endprogs[initial] or endprogs[token[1]] or