Parentheses now work in import statements (tested)

This commit is contained in:
David Halter
2012-07-13 01:00:44 +02:00
parent 4739eb6c02
commit 972b4bd13d
4 changed files with 40 additions and 5 deletions

View File

@@ -2,6 +2,7 @@ import re
import sys
import os
import types
import io
import inspect
import debug
@@ -71,7 +72,7 @@ class Parser(CachedModule):
}
if sys.hexversion >= 0x03000000:
map_types['file object'] = 'import io; return io.TextWrapper(file)'
map_types['file object'] = 'import io; return io.TextIOWrapper(file)'
module_cache = {}
@@ -244,7 +245,11 @@ class Parser(CachedModule):
# variables
for name, value in stmts.items():
if type(value) == types.FileType:
if sys.hexversion >= 0x03000000:
file_type = io.TextIOWrapper
else:
file_type = types.FileType
if type(value) == file_type:
value = 'open()'
elif type(value).__name__ in ['int', 'bool', 'float',
'dict', 'list', 'tuple']:

View File

@@ -32,6 +32,7 @@ from _compatibility import next, literal_eval, tokenize_func, BytesIO
import tokenize
import re
import keyword
import debug
@@ -1007,9 +1008,18 @@ class PyFuzzyParser(object):
:rtype: list
"""
imports = []
brackets = False
continue_kw = [",", ";", "\n", ')'] \
+ list(set(keyword.kwlist) - set(['as']))
while True:
token_type, tok, indent = self.next()
if brackets and tok == '\n':
self.next()
if tok == '(': # python allows only one `(` in the statement.
brackets = True
self.next()
name, token_type, tok, start_indent, start_line = \
self._parsedotname()
self._parsedotname(self.current)
if not name:
break
name2 = None
@@ -1019,9 +1029,9 @@ class PyFuzzyParser(object):
name2 = Name(name2, start_indent2, start_line, self.line_nr)
i = Name(name, start_indent, start_line, self.line_nr)
imports.append((i, name2))
while tok not in [",", ";", "\n"]:
while tok not in continue_kw:
token_type, tok, indent = self.next()
if tok != ",":
if not (tok == "," or brackets and tok == '\n'):
break
return imports

View File

@@ -14,6 +14,23 @@ import os
#? ['dirname']
os.path.dirname
from itertools import (tee,
islice)
#? ['islice']
islice
from functools import (partial, wraps)
#? ['wraps']
wraps
from keyword import kwlist, \
iskeyword
#? ['kwlist']
kwlist
from tokenize import io
tokenize.generate_tokens
# -----------------
# builtins
# -----------------

View File

@@ -1,3 +1,6 @@
from a import (b
def blub():
return 0
def openbrace():
asdf = 3
asdf