caught some errors from invalid code that were caused by unbalanced closing brackets

David Halter
2012-08-07 10:09:59 +02:00
parent f4db5e8a47
commit 11808d8fa3
3 changed files with 43 additions and 27 deletions
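The failure mode behind this change: when the text in front of the cursor contains a closing bracket with no matching opener (as in the new "int())." test case), the backwards tokenizer never finds the start of the expression, keeps pulling in earlier lines, and ultimately hands Python's tokenize module a reversed stream that still has an open bracket when the input runs out, at which point tokenize raises TokenError. A standalone sketch of that stdlib behaviour, independent of the diff below:

    import tokenize
    from io import StringIO

    # An opening bracket that is never closed: tokenize raises TokenError
    # ("EOF in multi-line statement") once the input is exhausted.
    broken = "x = (1 +\n"
    try:
        for token in tokenize.generate_tokens(StringIO(broken).readline):
            print(token)
    except tokenize.TokenError as err:
        print("Tokenize couldn't finish:", err)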


@@ -205,6 +205,12 @@ def prepare_goto(source, position, source_path, module, goto_path,
     debug.dbg('start: %s in %s' % (goto_path, scope))
     user_stmt = module.parser.user_stmt
+    if not user_stmt and len(goto_path.split('\n')) > 1:
+        # If the user_stmt is not defined and the goto_path is multi-line,
+        # something's strange. Most probably the backwards tokenizer matched
+        # too much.
+        return []
     if isinstance(user_stmt, parsing.Import):
         scopes = [imports.ImportPath(user_stmt, is_like_search)]
     else:
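A rough, self-contained illustration of the new guard with made-up values (the real prepare_goto works on parser state rather than plain strings): when no user statement was parsed and the extracted path spans more than one line, the backwards tokenizer has most likely run past the statement, so returning nothing is safer than guessing.

    user_stmt = None                      # parser found no statement at the cursor
    goto_path = "b\nfrom a import (b"     # hypothetical over-matched path

    if not user_stmt and len(goto_path.split('\n')) > 1:
        definitions = []                  # give up on suspicious multi-line paths
    else:
        definitions = ['resolve goto_path normally']
    print(definitions)                    # []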


@@ -1,9 +1,11 @@
 from __future__ import with_statement
 import re
 import tokenize
+import sys
 import parsing
 import builtin
+import debug

 files = {}
 load_module_cb = None
@@ -63,7 +65,7 @@ class ModuleWithCursor(Module):
             else:
                 line = line + '\n'
             # add lines with a backslash at the end
-            while self._line_temp > 1:
+            while 1:
                 self._line_temp -= 1
                 last_line = self.get_line(self._line_temp)
                 if last_line and last_line[-1] == '\\':
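For context, a simplified, hypothetical version of what this loop does: while scanning upwards, lines that end in a backslash are glued onto the line below them. The old "while self._line_temp > 1:" condition stopped decrementing at line 1, so the first line could be handed out over and over; "while 1:" instead relies on get_line() (changed further down) to signal the end of the file.

    lines = ["a = 1 + \\", "    2", "b = a"]

    def join_continuations(line_nr):
        # Start at line_nr and pull in preceding backslash-continued lines.
        line = lines[line_nr - 1]
        while 1:
            line_nr -= 1
            if line_nr < 1:
                break                     # stand-in for get_line() raising StopIteration
            last_line = lines[line_nr - 1]
            if last_line and last_line[-1] == '\\':
                line = last_line[:-1] + ' ' + line
            else:
                break
        return line

    print(join_continuations(2))          # prints lines 1 and 2 joined into one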
@@ -81,34 +83,37 @@ class ModuleWithCursor(Module):
         gen = tokenize.generate_tokens(fetch_line)
         string = ''
         level = 0
-        for token_type, tok, start, end, line in gen:
-            #print token_type, tok, force_point
-            if level > 0:
-                if tok in close_brackets:
-                    level += 1
-                if tok in open_brackets:
-                    level -= 1
-            elif tok == '.':
-                force_point = False
-            elif force_point:
-                # it is reversed, therefore a number is getting recognized
-                # as a floating point number
-                if token_type == tokenize.NUMBER and tok[0] == '.':
-                    force_point = False
-                else:
-                    #print 'break2', token_type, tok
-                    break
-            elif tok in close_brackets:
-                level += 1
-            elif token_type in [tokenize.NAME, tokenize.STRING]:
-                force_point = True
-            elif token_type == tokenize.NUMBER:
-                pass
-            else:
-                #print 'break', token_type, tok
-                break
-            string += tok
+        try:
+            for token_type, tok, start, end, line in gen:
+                #print token_type, tok, force_point
+                if level > 0:
+                    if tok in close_brackets:
+                        level += 1
+                    if tok in open_brackets:
+                        level -= 1
+                elif tok == '.':
+                    force_point = False
+                elif force_point:
+                    # it is reversed, therefore a number is getting recognized
+                    # as a floating point number
+                    if token_type == tokenize.NUMBER and tok[0] == '.':
+                        force_point = False
+                    else:
+                        #print 'break2', token_type, tok
+                        break
+                elif tok in close_brackets:
+                    level += 1
+                elif token_type in [tokenize.NAME, tokenize.STRING]:
+                    force_point = True
+                elif token_type == tokenize.NUMBER:
+                    pass
+                else:
+                    #print 'break', token_type, tok
+                    break
+                string += tok
+        except tokenize.TokenError:
+            debug.warning("Tokenize couldn't finish", sys.exc_info)

         return string[::-1]
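The pattern above (collect tokens until tokenize gives up, then keep whatever was gathered) can be tried in isolation. A sketch with a throwaway input, the reversed text of the new "from a import (b" test line, where the unmatched bracket in the reversed stream is what triggers TokenError:

    import sys
    import tokenize
    from io import StringIO

    def partial_token_string(text):
        string = ''
        gen = tokenize.generate_tokens(StringIO(text).readline)
        try:
            for token_type, tok, start, end, line in gen:
                string += tok
        except tokenize.TokenError:
            print("Tokenize couldn't finish", sys.exc_info()[1])
        return string

    # Reversed "from a import (b": the '(' is never closed, but the tokens
    # read before the error are still returned.
    print(partial_token_string("b( tropmi a morf\n"))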
@@ -125,6 +130,8 @@ class ModuleWithCursor(Module):
         if not self._line_cache:
             self._line_cache = self.source.split('\n')

+        if line < 1:
+            raise StopIteration()
         try:
             return self._line_cache[line - 1]
         except IndexError:
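Why the new "line < 1" check matters: Python lists accept negative indices, so asking for line 0 used to return the last line of the file and the backwards scan could wrap around instead of stopping. The tokenize module of that era treated StopIteration from its readline callable as end of input, so raising it here ends the scan cleanly. A small sketch with a fake line cache:

    line_cache = ["first line", "second line", "third line"]

    def get_line(line):
        if line < 1:
            raise StopIteration()         # clean end of input for the tokenizer
        try:
            return line_cache[line - 1]
        except IndexError:
            raise StopIteration()

    print(line_cache[0 - 1])              # without the guard, line 0 wraps to 'third line'
    print(get_line(1))                    # 'first line'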


@@ -1,3 +1,6 @@
+#? ['message']
+AttributeError()).message
+
 from a import (b
 def blub():
     return 0
@@ -53,5 +56,5 @@ try:
 #? str()
 ""

-#? ['lala']
-isinstance())
+#? []
+int()).
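For readers unfamiliar with these test files: as I read jedi's completion-test format, a "#?" comment states the expected result for the line that follows, with a list meaning expected completions and an empty list meaning the broken line should simply complete to nothing rather than crash. The two new expectations, annotated:

    #? ['message']     # despite the stray bracket, 'message' is still completed
    AttributeError()).message

    #? []              # the unbalanced call should produce no completions at all
    int()).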