Mirror of https://github.com/davidhalter/jedi.git, synced 2025-12-09 07:14:48 +08:00
Generalize the tuple order of tokens.
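The hunks below change the order in which token tuples flow from the tokenizer into the parsers: the reordering shim in the base Parser is dropped, and every consumer now unpacks tokens as (type, value, start_pos, prefix). As a rough illustration only (the constant and the literal values here are made up, not taken from the commit; start_pos is assumed to be a (line, column) pair and prefix the whitespace preceding the token):

    NAME = 1  # placeholder for the real NAME token type constant

    # Order the pgen parser used to expect (produced by the removed Parser._tokenize shim):
    old_token = (NAME, 'foo', ' ', (1, 4))   # (type, value, prefix, start_pos)

    # Generalized order used by the tokenizer and all consumers after this commit:
    new_token = (NAME, 'foo', (1, 4), ' ')   # (type, value, start_pos, prefix)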
@@ -133,7 +133,7 @@ class Parser(object):
         if self._parsed is not None:
             return self._parsed

-        self._parsed = self.pgen_parser.parse(self._tokenize(tokenizer))
+        self._parsed = self.pgen_parser.parse(tokenizer)

         if self._start_symbol == 'file_input' != self._parsed.type:
             # If there's only one statement, we get back a non-module. That's
@@ -149,10 +149,6 @@ class Parser(object):
         # TODO rename to get_root_node
         return self._parsed

-    def _tokenize(self, tokenizer):
-        for typ, value, start_pos, prefix in tokenizer:
-            yield typ, value, prefix, start_pos
-
     def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,
                        add_token_callback):
         raise ParseError
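The three removed lines above were the only reason the base Parser touched the token stream: they swapped the last two fields of every tuple before handing it to the pgen parser. Expressed as a standalone generator (a sketch, not the jedi API), the deleted shim did this:

    def reorder_tokens(tokens):
        """Turn (typ, value, start_pos, prefix) tuples into (typ, value, prefix, start_pos).

        Mirrors the Parser._tokenize shim deleted above; once the pgen parser
        consumes the tokenizer's order directly, this swap is unnecessary.
        """
        for typ, value, start_pos, prefix in tokens:
            yield typ, value, prefix, start_pos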
@@ -300,6 +296,9 @@ class ParserWithRecovery(Parser):
         self.module.path = module_path
         self.module.global_names = self._global_names

+    def parse(self, tokenizer):
+        return super().parse(self._tokenize(self._tokenize(tokenizer)))
+
     def error_recovery(self, grammar, stack, arcs, typ, value, start_pos, prefix,
                        add_token_callback):
         """
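ParserWithRecovery now wraps the incoming tokenizer itself before delegating to the base class. A minimal sketch of that override pattern, with simplified stand-in classes rather than the real jedi ones:

    class BaseParser:
        def parse(self, tokens):
            # Stand-in for Parser.parse, which feeds the stream to pgen_parser.parse().
            return list(tokens)

    class RecoveringParser(BaseParser):
        def _tokenize(self, tokens):
            # Stand-in for ParserWithRecovery._tokenize: indent counting and
            # error-recovery bookkeeping would happen here; the tuple order is
            # passed through unchanged.
            for typ, value, start_pos, prefix in tokens:
                yield typ, value, start_pos, prefix

        def parse(self, tokens):
            # Wrap the raw tokenizer before handing it to the base implementation.
            return super().parse(self._tokenize(tokens))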
@@ -398,7 +397,7 @@ class ParserWithRecovery(Parser):
             elif typ == INDENT:
                 self._indent_counter += 1

-            yield typ, value, prefix, start_pos
+            yield typ, value, start_pos, prefix

     def __repr__(self):
         return "<%s: %s>" % (type(self).__name__, self.module)
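The error-recovery tokenizer keeps an indentation counter while re-yielding tokens, now in the generalized order. A self-contained sketch of that bookkeeping (the token type codes are placeholders, and the DEDENT branch is assumed from context rather than shown in the hunk):

    INDENT, DEDENT = 5, 6   # placeholder token type codes

    def count_indents(tokens):
        """Track indentation depth while passing tokens through unchanged."""
        depth = 0
        for typ, value, start_pos, prefix in tokens:
            if typ == DEDENT:
                depth -= 1
            elif typ == INDENT:
                depth += 1
            yield typ, value, start_pos, prefix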
@@ -119,7 +119,7 @@ class PgenParser(object):
         self.error_recovery = error_recovery

     def parse(self, tokenizer):
-        for type_, value, prefix, start_pos in tokenizer:
+        for type_, value, start_pos, prefix in tokenizer:
             if self.addtoken(type_, value, prefix, start_pos):
                 break
         else:
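With the loop unpacking the generalized order, the pgen parser can consume the tokenizer output directly; note that the addtoken call itself keeps its (type, value, prefix, start_pos) argument order. A hedged sketch of the same driving loop as a free function (the callback and the for/else handling are simplified stand-ins, not the real pgen API):

    def drive_parser(addtoken, tokens):
        """Feed token tuples to an addtoken-style callback until it reports completion."""
        for type_, value, start_pos, prefix in tokens:
            if addtoken(type_, value, prefix, start_pos):
                break
        else:
            # The stream ran out before the grammar accepted a complete input.
            raise SyntaxError("incomplete input")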
@@ -1060,7 +1060,7 @@ class Function(ClassOrFunc):

     def _get_paramlist_code(self):
         return self.children[2].get_code()

     @property
     def doc(self):
         """ Return a document string including call signature. """
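children[2] relies on the shape of a funcdef node, whose children are assumed here to be the 'def' keyword, the function name, the parameter list, a ':' operator, and the body suite. A placeholder illustration with strings standing in for real tree nodes:

    # Placeholder children of a node for "def foo(a, b=1): pass":
    children = ['def', 'foo', '(a, b=1)', ':', 'pass']
    paramlist_code = children[2]    # the parameter list, parentheses included
    print(paramlist_code)           # prints "(a, b=1)"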
@@ -1110,7 +1110,7 @@ class Lambda(Function):

     def _get_paramlist_code(self):
         return '(' + ''.join(param.get_code() for param in self.params).strip() + ')'

     @property
     def params(self):
         return self.children[1:-2]
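For a lambdef node the parameters carry no parentheses of their own; they sit between the 'lambda' keyword and the trailing ':' plus body, which is what the children[1:-2] slice extracts and what _get_paramlist_code wraps in parentheses. A placeholder illustration (strings instead of real nodes; how commas and prefixes attach to the params is glossed over here):

    # Placeholder children of a node for "lambda a, b=1: a + b":
    children = ['lambda', 'a,', ' b=1', ':', ' a + b']
    params = children[1:-2]                       # everything between the keyword and ': <body>'
    code = '(' + ''.join(params).strip() + ')'
    print(code)                                   # prints "(a, b=1)"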