diff --git a/jedi/common.py b/jedi/common.py
index 5c42e0b0..e188f781 100644
--- a/jedi/common.py
+++ b/jedi/common.py
@@ -29,10 +29,11 @@ class PushBackIterator(object):
 
 
 class NoErrorTokenizer(object):
-    def __init__(self, readline, line_offset=0):
+    def __init__(self, readline, line_offset=0, stop_on_scope=False):
         self.readline = readline
         self.gen = PushBackIterator(tokenize.generate_tokens(readline))
         self.line_offset = line_offset
+        self.stop_on_scope = stop_on_scope
 
     def push_last_back(self):
         self.gen.push_back(self.current)
@@ -61,7 +62,13 @@ class NoErrorTokenizer(object):
                 self.gen = PushBackIterator(tokenize.generate_tokens(
                                                             self.readline))
                 self.current = self.next()
+
         c = list(self.current)
+
+        # stop if a new class or definition is started at position zero.
+        if self.stop_on_scope and c[1] in ['def', 'class'] and c[2][1] == 0:
+            raise StopIteration()
+
         c[2] = self.line_offset + c[2][0], c[2][1]
         return c
 
diff --git a/jedi/parsing.py b/jedi/parsing.py
index e5c32754..fe656628 100644
--- a/jedi/parsing.py
+++ b/jedi/parsing.py
@@ -244,6 +244,8 @@ class Module(Scope):
         self._name = None
         self.used_names = {}
         self.temp_used_names = []
+        # this may be changed depending on fast_parser
+        self.line_offset = 0
 
     def add_global(self, name):
         """
@@ -1137,10 +1139,10 @@ class PyFuzzyParser(object):
     :param user_position: The line/column, the user is currently on.
     :type user_position: tuple(int, int)
     :param no_docstr: If True, a string at the beginning is not a docstr.
-    :param tokenize_gen: A prepared tokenize generator -> for fast_parser
+    :param stop_on_scope: Stop if a scope appears -> for fast_parser
     """
     def __init__(self, code, module_path=None, user_position=None,
-                 no_docstr=False, line_offset=0, tokenize_gen=None):
+                 no_docstr=False, line_offset=0, stop_on_scope=None):
         self.user_position = user_position
         self.user_scope = None
         self.user_stmt = None
@@ -1157,12 +1159,10 @@ class PyFuzzyParser(object):
         # any errors of tokenize and just parse ahead.
         self._line_offset = line_offset
 
-        if tokenize_gen is None:
-            code = code + '\n'  # end with \n, because the parser needs it
-            buf = StringIO(code)
-            self.gen = common.NoErrorTokenizer(buf.readline, line_offset)
-        else:
-            self.gen = tokenize_gen
+        code = code + '\n'  # end with \n, because the parser needs it
+        buf = StringIO(code)
+        self.gen = common.NoErrorTokenizer(buf.readline, line_offset,
+                                           stop_on_scope)
         self.parse()
 
     def __repr__(self):
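
For illustration, a minimal standalone sketch of the behaviour the new stop_on_scope flag adds. The helper tokens_until_scope below is hypothetical, not part of jedi; it mirrors the check added to NoErrorTokenizer, using the same tokenize layout as the patch (c[1] is the token string, c[2] its (row, column) start). Iteration stops as soon as a def or class token begins at column zero, presumably so the fast parser can cut the source at top-level scope boundaries.

    from io import StringIO
    import tokenize

    def tokens_until_scope(code):
        # Hypothetical helper mirroring the stop_on_scope check above:
        # yield tokens until a new class or definition starts at
        # position (column) zero.
        for tok in tokenize.generate_tokens(StringIO(code).readline):
            c = list(tok)
            if c[1] in ['def', 'class'] and c[2][1] == 0:
                return
            yield c

    source = "x = 1\ndef f():\n    pass\n"
    for c in tokens_until_scope(source):
        print(c[1], c[2])  # only the tokens of the first line are printed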