From 01ddacfec4fb8e10de2b4a160293ae516b4ec2b6 Mon Sep 17 00:00:00 2001 From: Dave Halter Date: Tue, 14 Jun 2016 08:57:38 +0200 Subject: [PATCH] Generalize the tuple order of tokens. --- jedi/parser/__init__.py | 11 +++++------ jedi/parser/pgen2/parse.py | 2 +- jedi/parser/tree.py | 4 ++-- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/jedi/parser/__init__.py b/jedi/parser/__init__.py index a37b13bc..dc23c058 100644 --- a/jedi/parser/__init__.py +++ b/jedi/parser/__init__.py @@ -133,7 +133,7 @@ class Parser(object): if self._parsed is not None: return self._parsed - self._parsed = self.pgen_parser.parse(self._tokenize(tokenizer)) + self._parsed = self.pgen_parser.parse(tokenizer) if self._start_symbol == 'file_input' != self._parsed.type: # If there's only one statement, we get back a non-module. That's @@ -149,10 +149,6 @@ class Parser(object): # TODO rename to get_root_node return self._parsed - def _tokenize(self, tokenizer): - for typ, value, start_pos, prefix in tokenizer: - yield typ, value, prefix, start_pos - def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix, add_token_callback): raise ParseError @@ -300,6 +296,9 @@ class ParserWithRecovery(Parser): self.module.path = module_path self.module.global_names = self._global_names + def parse(self, tokenizer): + return super(ParserWithRecovery, self).parse(self._tokenize(tokenizer)) + def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix, add_token_callback): """ @@ -398,7 +397,7 @@ class ParserWithRecovery(Parser): elif typ == INDENT: self._indent_counter += 1 - yield typ, value, prefix, start_pos + yield typ, value, start_pos, prefix def __repr__(self): return "<%s: %s>" % (type(self).__name__, self.module) diff --git a/jedi/parser/pgen2/parse.py b/jedi/parser/pgen2/parse.py index 47573eef..0fafd306 100644 --- a/jedi/parser/pgen2/parse.py +++ b/jedi/parser/pgen2/parse.py @@ -119,7 +119,7 @@ class PgenParser(object): self.error_recovery = error_recovery def 
parse(self, tokenizer): - for type_, value, prefix, start_pos in tokenizer: + for type_, value, start_pos, prefix in tokenizer: if self.addtoken(type_, value, prefix, start_pos): break else: diff --git a/jedi/parser/tree.py b/jedi/parser/tree.py index a4890d33..543dd34b 100644 --- a/jedi/parser/tree.py +++ b/jedi/parser/tree.py @@ -1060,7 +1060,7 @@ class Function(ClassOrFunc): def _get_paramlist_code(self): return self.children[2].get_code() - + @property def doc(self): """ Return a document string including call signature. """ @@ -1110,7 +1110,7 @@ class Lambda(Function): def _get_paramlist_code(self): return '(' + ''.join(param.get_code() for param in self.params).strip() + ')' - + @property def params(self): return self.children[1:-2]