diff --git a/jedi/modules.py b/jedi/modules.py
index 42bf811a..85a835da 100644
--- a/jedi/modules.py
+++ b/jedi/modules.py
@@ -118,9 +118,13 @@ class ModuleWithCursor(Module):
         string = ''
         level = 0
         force_point = False
+        last_type = None
         try:
             for token_type, tok, start, end, line in gen:
                 #print 'tok', token_type, tok, force_point
+                if last_type == token_type == tokenize.NAME:
+                    string += ' '
+
                 if level > 0:
                     if tok in close_brackets:
                         level += 1
@@ -146,6 +150,7 @@ class ModuleWithCursor(Module):
                 self._column_temp = self._line_length - end[1]

                 string += tok
+                last_type = token_type
         except tokenize.TokenError:
             debug.warning("Tokenize couldn't finish", sys.exc_info)

diff --git a/jedi/parsing.py b/jedi/parsing.py
index c3e8cd42..f4e8e24a 100644
--- a/jedi/parsing.py
+++ b/jedi/parsing.py
@@ -1322,6 +1322,7 @@ class PyFuzzyParser(object):

         n = Name(self.module, names, first_pos, self.end_pos) if names \
             else None
+        #if self.module.path != '__builtin__': print n
         return n, token_type, tok

     def _parseimportlist(self):
@@ -1555,8 +1556,9 @@ class PyFuzzyParser(object):
                         if ret is not None:
                             ret.parent = lambd
                             lambd.returns.append(ret)
-                        lambd.end_pos = ret.end_pos
                         lambd.parent = self.scope
+                        lambd.end_pos = self.end_pos
+                        #print lambd, added_breaks, ret, param
                         tok_list[-1] = lambd
                         continue
                     elif token_type == tokenize.NAME: