From 36b75c3e15d2ee45b55a8783f520e7f86eebe8ad Mon Sep 17 00:00:00 2001
From: David Halter
Date: Sun, 16 Dec 2012 03:50:50 +0100
Subject: [PATCH] fixed the last remaining problems with fast_parser (not cached yet)

---
 jedi/common.py      | 6 +++++-
 jedi/fast_parser.py | 7 +++++++
 jedi/parsing.py     | 6 +++---
 3 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/jedi/common.py b/jedi/common.py
index c67fb6b7..90eea68e 100644
--- a/jedi/common.py
+++ b/jedi/common.py
@@ -62,6 +62,7 @@ class NoErrorTokenizer(object):
         self.line_offset = line_offset
         self.stop_on_scope = stop_on_scope
         self.first_scope = False
+        self.closed = False

     def push_last_back(self):
         self.gen.push_back(self.current)
@@ -71,6 +72,8 @@ class NoErrorTokenizer(object):
         return self.__next__()

     def __next__(self):
+        if self.closed:
+            raise MultiLevelStopIteration()
         try:
             self.current = next(self.gen)
         except tokenize.TokenError:
@@ -98,7 +101,8 @@
             breaks = ['def', 'class', '@']
             if self.stop_on_scope and c[1] in breaks and c[2][1] == 0:
                 if self.first_scope:
-                    raise StopIteration
+                    self.closed = True
+                    raise MultiLevelStopIteration()
                 elif c[1] != '@':
                     self.first_scope = True

diff --git a/jedi/fast_parser.py b/jedi/fast_parser.py
index 0b9359f6..af416dee 100644
--- a/jedi/fast_parser.py
+++ b/jedi/fast_parser.py
@@ -183,6 +183,13 @@ class FastParser(use_metaclass(CachedFastParser)):
             r'(?=\n(?:def|class|@)|$)'
         parts = re.findall(r, code, re.DOTALL)

+        if len(parts) > 1 and not re.match('def|class|@', parts[0]):
+            # Merge the first two because `common.NoErrorTokenizer` is not able
+            # to know if there's a class/func or not.
+            # Therefore every part has it's own class/func. Exactly one.
+            parts[0] += parts[1]
+            parts.pop(1)
+
         line_offset = 0
         start = 0
         p = None
diff --git a/jedi/parsing.py b/jedi/parsing.py
index 114a9c54..02315ec3 100644
--- a/jedi/parsing.py
+++ b/jedi/parsing.py
@@ -1180,7 +1180,7 @@ class PyFuzzyParser(object):
         self.top_module = top_module or self.module
         try:
             self.parse()
-        except StopIteration:
+        except common.MultiLevelStopIteration:
             # sometimes StopIteration isn't catched. Just ignore it.
             pass

@@ -1566,7 +1566,7 @@ class PyFuzzyParser(object):
                     string = set_string if set_string is not None else string + tok

                 token_type, tok = self.next()
-            except StopIteration:
+            except (StopIteration, common.MultiLevelStopIteration):
                 # comes from tokenizer
                 break

@@ -1606,7 +1606,7 @@ class PyFuzzyParser(object):
         try:
             type, tok, self.start_pos, self.end_pos, \
                 self.parserline = next(self.gen)
-        except StopIteration:
+        except (StopIteration, common.MultiLevelStopIteration):
             # on finish, set end_pos correctly
             s = self.scope
             while s is not None:
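
Note below the patch (illustrative only, not part of the commit): a minimal
sketch of what the new fast_parser.py hunk does after the regex split. The
full splitting pattern is assumed from the surrounding file -- only its
trailing lookahead is visible in the hunk -- and `source` is a made-up
example string. When a module starts with plain statements, the first part
has no def/class/@ header of its own, so it is merged into the following
part, leaving every part with exactly one class/func for
`common.NoErrorTokenizer` to consume.

    import re

    source = ("import os\n"
              "CONSTANT = 1\n"
              "\n"
              "def foo():\n"
              "    return CONSTANT\n"
              "\n"
              "class Bar:\n"
              "    pass\n")

    # Assumed full pattern; only the (?=\n(?:def|class|@)|$) tail appears in
    # the hunk above.
    r = r'(?:\n(?:def|class|@.*?\n(?:def|class))|^).*?' \
        r'(?=\n(?:def|class|@)|$)'
    parts = re.findall(r, source, re.DOTALL)

    if len(parts) > 1 and not re.match('def|class|@', parts[0]):
        # Same merge as in the hunk: glue the header-less leading code onto
        # the first part that starts with def/class/@.
        parts[0] += parts[1]
        parts.pop(1)

    for i, p in enumerate(parts):
        # After the merge there are two parts: module code + def foo, class Bar
        print(i, repr(p))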