From 3e9b72b636108ad8a44a31462cf55804f6e68863 Mon Sep 17 00:00:00 2001
From: Dave Halter
Date: Thu, 20 Feb 2014 01:17:19 +0100
Subject: [PATCH] created a PushBackTokenizer specifically for the parser

---
 jedi/parser/__init__.py | 40 ++++++++++++++++++++++++++++++++++++++--
 jedi/parser/tokenize.py |  7 -------
 2 files changed, 38 insertions(+), 9 deletions(-)

diff --git a/jedi/parser/__init__.py b/jedi/parser/__init__.py
index 3e38690b..f69cf077 100644
--- a/jedi/parser/__init__.py
+++ b/jedi/parser/__init__.py
@@ -47,8 +47,9 @@ class Parser(object):
         self._scope = self.module
         self._current = (None, None)
 
-        self._gen = tokenizer or tokenize.NoErrorTokenizer(source)
-        self._gen = tokenize.NoErrorTokenizer(source, offset, is_fast)
+        tokenizer = tokenizer or tokenize.NoErrorTokenizer(source)
+        tokenizer = tokenize.NoErrorTokenizer(source, offset, is_fast)
+        self._gen = PushBackTokenizer(tokenizer)
         self._top_module = top_module or self.module
         try:
             self._parse()
@@ -633,3 +634,38 @@ class Parser(object):
                                   tokenize.tok_name[token_type], self.start_pos)
                 continue
             self.no_docstr = False
+
+
+class PushBackTokenizer(object):
+    def __init__(self, tokenizer):
+        self._tokenizer = tokenizer
+        self._push_backs = []
+        self.current = [None, None, (0, 0), (0, 0), '']
+
+    def push_last_back(self):
+        self._push_backs.append(self.current)
+
+    def next(self):
+        """ Python 2 Compatibility """
+        return self.__next__()
+
+    def __next__(self):
+        if self._push_backs:
+            return self._push_backs.pop(0)
+
+        self.current = next(self._tokenizer)
+
+    def close():
+        if not self.first_stmt:
+            self.closed = True
+            raise common.MultiLevelStopIteration()
+        # ignore indents/comments
+        return self.current
+
+    @property
+    def previous(self):
+        return self._tokenizer.previous
+
+    @property
+    def last_previous(self):
+        return self._tokenizer.last_previous
diff --git a/jedi/parser/tokenize.py b/jedi/parser/tokenize.py
index 482678f9..d480705c 100644
--- a/jedi/parser/tokenize.py
+++ b/jedi/parser/tokenize.py
@@ -303,8 +303,6 @@ class NoErrorTokenizer(object):
         self.source = source
         self.gen = source_tokens(source, line_offset)
         self.closed = False
-        self.is_first = True
-        self.push_backs = []
 
         # fast parser options
         self.is_fast_parser = is_fast_parser
@@ -315,9 +313,6 @@
         self.is_decorator = False
         self.first_stmt = True
 
-    def push_last_back(self):
-        self.push_backs.append(self.current)
-
     def next(self):
         """ Python 2 Compatibility """
         return self.__next__()
@@ -325,8 +320,6 @@
     def __next__(self):
         if self.closed:
             raise common.MultiLevelStopIteration()
-        if self.push_backs:
-            return self.push_backs.pop(0)
 
         self.last_previous = self.previous
         self.previous = self.current