mirror of https://github.com/davidhalter/jedi.git
created a PushBackTokenizer specifically for the parser
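The point of a push-back tokenizer is that the parser can consume a token, decide it actually belongs to the next statement, and hand it back so the next read returns the same token again. A minimal sketch of that pattern, independent of jedi's actual classes (the PushBack class and the token list below are illustrative, not code from this commit):

class PushBack(object):
    """Wraps any token iterator and lets the caller re-queue the last token."""
    def __init__(self, tokens):
        self._tokens = iter(tokens)
        self._push_backs = []
        self.current = None

    def push_last_back(self):
        # Re-queue the most recently returned token.
        self._push_backs.append(self.current)

    def __iter__(self):
        return self

    def __next__(self):
        if self._push_backs:
            return self._push_backs.pop(0)
        self.current = next(self._tokens)
        return self.current

    next = __next__  # Python 2 compatibility

tokens = PushBack(['def', 'f', '(', ')', ':'])
assert next(tokens) == 'def'
tokens.push_last_back()        # not ready to handle it yet
assert next(tokens) == 'def'   # the same token is returned again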
@@ -47,8 +47,9 @@ class Parser(object):
         self._scope = self.module
         self._current = (None, None)

-        self._gen = tokenizer or tokenize.NoErrorTokenizer(source)
-        self._gen = tokenize.NoErrorTokenizer(source, offset, is_fast)
+        tokenizer = tokenizer or tokenize.NoErrorTokenizer(source)
+        tokenizer = tokenize.NoErrorTokenizer(source, offset, is_fast)
+        self._gen = PushBackTokenizer(tokenizer)
         self._top_module = top_module or self.module
         try:
             self._parse()
@@ -633,3 +634,38 @@ class Parser(object):
                               tokenize.tok_name[token_type], self.start_pos)
                 continue
             self.no_docstr = False
+
+
+class PushBackTokenizer(object):
+    def __init__(self, tokenizer):
+        self._tokenizer = tokenizer
+        self._push_backs = []
+        self.current = [None, None, (0, 0), (0, 0), '']
+
+    def push_last_back(self):
+        self._push_backs.append(self.current)
+
+    def next(self):
+        """ Python 2 Compatibility """
+        return self.__next__()
+
+    def __next__(self):
+        if self._push_backs:
+            return self._push_backs.pop(0)
+
+        self.current = next(self._tokenizer)
+
+        def close():
+            if not self.first_stmt:
+                self.closed = True
+                raise common.MultiLevelStopIteration()
+        # ignore indents/comments
+        return self.current
+
+    @property
+    def previous(self):
+        return self._tokenizer.previous
+
+    @property
+    def last_previous(self):
+        return self._tokenizer.last_previous
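With the wrapper in place, the parser's self._gen iterates like the underlying tokenizer but additionally offers push_last_back(). A hypothetical helper built on the same idea (the function name and the token handling below are made up for illustration, not taken from this commit):

def starts_new_definition(gen):
    # Peek at the next token; if it does not open a def/class/decorator,
    # hand it back so the following parse step sees it unchanged.
    token_type, tok_str = next(gen)[:2]
    if tok_str in ('def', 'class', '@'):
        return True
    gen.push_last_back()
    return False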
@@ -303,8 +303,6 @@ class NoErrorTokenizer(object):
         self.source = source
         self.gen = source_tokens(source, line_offset)
         self.closed = False
-        self.is_first = True
-        self.push_backs = []

         # fast parser options
         self.is_fast_parser = is_fast_parser
@@ -315,9 +313,6 @@ class NoErrorTokenizer(object):
         self.is_decorator = False
         self.first_stmt = True

-    def push_last_back(self):
-        self.push_backs.append(self.current)
-
     def next(self):
         """ Python 2 Compatibility """
         return self.__next__()
@@ -325,8 +320,6 @@ class NoErrorTokenizer(object):
     def __next__(self):
         if self.closed:
             raise common.MultiLevelStopIteration()
-        if self.push_backs:
-            return self.push_backs.pop(0)

         self.last_previous = self.previous
         self.previous = self.current