diff --git a/jedi/parser/__init__.py b/jedi/parser/__init__.py
index dc23c058..cef7c9ef 100644
--- a/jedi/parser/__init__.py
+++ b/jedi/parser/__init__.py
@@ -336,7 +336,7 @@ class ParserWithRecovery(Parser):
         # print('err', token.tok_name[typ], repr(value), start_pos, len(stack), index)
         if self._stack_removal(grammar, stack, arcs, index + 1, value, start_pos):
-            add_token_callback(typ, value, prefix, start_pos)
+            add_token_callback(typ, value, start_pos, prefix)
         else:
             if typ == INDENT:
                 # For every deleted INDENT we have to delete a DEDENT as well.
diff --git a/jedi/parser/pgen2/parse.py b/jedi/parser/pgen2/parse.py
index 0fafd306..9e74838c 100644
--- a/jedi/parser/pgen2/parse.py
+++ b/jedi/parser/pgen2/parse.py
@@ -120,7 +120,7 @@ class PgenParser(object):
     def parse(self, tokenizer):
         for type_, value, start_pos, prefix in tokenizer:
-            if self.addtoken(type_, value, prefix, start_pos):
+            if self.addtoken(type_, value, start_pos, prefix):
                 break
         else:
             # We never broke out -- EOF is too soon -- Unfinished statement.
@@ -130,7 +130,7 @@ class PgenParser(object):
             raise InternalParseError("incomplete input", type_, value, start_pos)
         return self.rootnode
 
-    def addtoken(self, type_, value, prefix, start_pos):
+    def addtoken(self, type_, value, start_pos, prefix):
         """Add a token; return True if this is the end of the program."""
         ilabel = token_to_ilabel(self.grammar, type_, value)
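
For reference, the bug being fixed is a positional-argument swap: the tokenizer yields 4-tuples in the order (type, value, start_pos, prefix), so addtoken and add_token_callback must accept their arguments in that same order. The following is a minimal sketch, not jedi's actual code; fake_tokenizer and addtoken here are hypothetical stand-ins that only illustrate how passing (type_, value, prefix, start_pos) silently exchanges the position tuple and the whitespace prefix.

# Minimal, self-contained sketch (hypothetical names, not jedi's code).
def fake_tokenizer(source):
    # Stand-in for a tokenizer that yields (type, value, start_pos, prefix),
    # where start_pos is a (row, column) tuple and prefix is the leading
    # whitespace/comment string attached to the token.
    yield ('NAME', source.strip(), (1, 0), '')


def addtoken(type_, value, start_pos, prefix):
    # With the corrected parameter order, start_pos really is the position
    # tuple and prefix really is the whitespace string.
    print('%s token %r at %s (prefix=%r)' % (type_, value, start_pos, prefix))


for type_, value, start_pos, prefix in fake_tokenizer('foo'):
    # Before the patch, call sites passed (type_, value, prefix, start_pos),
    # silently swapping the last two positional arguments.
    addtoken(type_, value, start_pos, prefix)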