protect token_list -> _token_list

Dave Halter
2014-03-04 12:32:34 +01:00
parent 7de4b14461
commit 65ce609a3c
5 changed files with 20 additions and 36 deletions
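
The change itself is a small encapsulation refactor: Statement's public token_list attribute becomes the private _token_list, the __slots__ declaration and every internal caller are updated to match, and outside code is steered toward accessors such as expression_list(). A minimal sketch of the pattern, with illustrative names rather than jedi's real signatures:

    class Statement(object):
        __slots__ = ('_token_list',)  # slot name must match the renamed attribute

        def __init__(self, token_list):
            # leading underscore marks the token list as internal storage
            self._token_list = token_list

        def expression_list(self):
            # public accessor; jedi's real method parses and caches,
            # this one just returns the tokens for illustration
            return self._token_list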

View File

@@ -364,7 +364,7 @@ def filter_private_variable(scope, call_scope, var_name):
 def _evaluate_list_comprehension(lc, parent=None):
     input = lc.input
-    nested_lc = lc.input.token_list[0]
+    nested_lc = input.expression_list()[0]
     if isinstance(nested_lc, pr.ListComprehension):
         # is nested LC
         input = nested_lc.stmt
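
Note that this call site does more than the mechanical rename: rather than indexing the now-private list directly, it switches to the public expression_list() accessor, which is exactly what the underscore is meant to encourage. For a hypothetical caller the migration looks like:

    first = stmt.token_list[0]         # before: reads the raw attribute
    first = stmt.expression_list()[0]  # after: goes through the accessor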

View File

@@ -185,7 +185,7 @@ class NameFinder(object):
         types = []
         if stmt.is_global():
             # global keyword handling.
-            for token_name in stmt.token_list[1:]:
+            for token_name in stmt._token_list[1:]:
                 if isinstance(token_name, pr.Name):
                     return evaluator.find_types(stmt.parent, str(token_name))
         else:

View File

@@ -568,7 +568,7 @@ class Parser(object):
                 stmt, tok = self._parse_statement(self._gen.current)
                 if stmt:
                     self._scope.add_statement(stmt)
-                    for t in stmt.token_list:
+                    for t in stmt._token_list:
                         if isinstance(t, pr.Name):
                             # add the global to the top, because there it is
                             # important.

View File

@@ -816,7 +816,7 @@ class Statement(Simple, DocstringMixin):
     :type start_pos: 2-tuple of int
     :param start_pos: Position (line, column) of the Statement.
     """
-    __slots__ = ('token_list', '_set_vars', 'as_names', '_expression_list',
+    __slots__ = ('_token_list', '_set_vars', 'as_names', '_expression_list',
                  '_assignment_details', '_names_are_set_vars', '_doc_token')

     def __init__(self, module, token_list, start_pos, end_pos, parent=None,
@@ -824,7 +824,7 @@ class Statement(Simple, DocstringMixin):
         super(Statement, self).__init__(module, start_pos, end_pos)
         if isinstance(start_pos, list):
             raise NotImplementedError()
-        self.token_list = token_list
+        self._token_list = token_list
         self._names_are_set_vars = names_are_set_vars
         if set_name_parents:
             for t in token_list:
@@ -839,7 +839,6 @@ class Statement(Simple, DocstringMixin):
         # cache
         self._assignment_details = []
-        # this is important for other scripts

     def get_code(self, new_line=True):
         def assemble(command_list, assignment=None):
@@ -893,7 +892,7 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
     def is_global(self):
         # first keyword of the first token is global -> must be a global
-        tok = self.token_list[0]
+        tok = self._token_list[0]
         return isinstance(tok, Name) and str(tok) == "global"

     @property
@@ -985,12 +984,8 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                 if lambd is not None:
                     token_list.append(lambd)
             elif tok == 'for':
-                list_comp, tok = parse_list_comp(
-                    token_iterator,
-                    token_list,
-                    start_pos,
-                    tok.end_pos
-                )
+                list_comp, tok = parse_list_comp(token_iterator, token_list,
+                                                 start_pos, tok.end_pos)
                 if list_comp is not None:
                     token_list = [list_comp]
@@ -1015,14 +1010,8 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                 if not token_list:
                     return None, tok
-                statement = stmt_class(
-                    self._sub_module,
-                    token_list,
-                    start_pos,
-                    end_pos,
-                    self.parent,
-                    set_name_parents=False
-                )
+                statement = stmt_class(self._sub_module, token_list, start_pos,
+                                       end_pos, self.parent, set_name_parents=False)
                 return statement, tok

             def parse_lambda(token_iterator):
@@ -1038,10 +1027,11 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                     break
             # TODO uncomment and run `./run.py func 395 --debug` shouldn't parse all statements.
             #print tok, tok.start_pos
+            #raise NotImplementedError()
             if tok != ':':
                 return None, tok

-            # since lambda is a Function scope, it needs Scope parents
+            # Since Lambda is a Function scope, it needs Scope parents.
             parent = self.get_parent_until(IsScope)
             lambd = Lambda(self._sub_module, params, start_pos, parent)
@@ -1065,14 +1055,14 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                                                added_breaks=added_breaks)
                 token_list = []
                 for stmt in arr:
-                    token_list += stmt.token_list
+                    token_list += stmt._token_list
                 start_pos = arr.start_pos[0], arr.start_pos[1] - 1
                 stmt = Statement(self._sub_module, token_list,
                                  start_pos, arr.end_pos)
                 arr.parent = stmt
-                stmt.token_list = stmt._expression_list = [arr]
+                stmt._token_list = stmt._expression_list = [arr]
             else:
-                for t in stmt.token_list:
+                for t in stmt._token_list:
                     if isinstance(t, Name):
                         t.parent = stmt
                 stmt._names_are_set_vars = names_are_set_vars
@@ -1099,7 +1089,7 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
         brackets = {'(': Array.TUPLE, '[': Array.LIST, '{': Array.SET}
         closing_brackets = ')', '}', ']'

-        token_iterator = iter(self.token_list)
+        token_iterator = iter(self._token_list)
         for tok in token_iterator:
             if isinstance(tok, tokenize.Token):
                 token_type = tok.type
@@ -1151,14 +1141,8 @@ isinstance(c, (tokenize.Token, Operator)) else unicode(c)
                     is_chain = True
                 elif tok_str == ',':  # implies a tuple
                     # expression is now an array not a statement anymore
-                    stmt = Statement(
-                        self._sub_module,
-                        result,
-                        result[0].start_pos,
-                        tok.end_pos,
-                        self.parent,
-                        set_name_parents=False
-                    )
+                    stmt = Statement(self._sub_module, result, result[0].start_pos,
+                                     tok.end_pos, self.parent, set_name_parents=False)
                     stmt._expression_list = result
                     arr, break_tok = parse_array(token_iterator, Array.TUPLE,
                                                  stmt.start_pos, stmt)
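
One detail worth noting: the __slots__ edit in the first hunk of this file is not optional. A class that declares __slots__ only allocates the attributes named there, so after the rename an assignment to the old name would fail. A standalone illustration, not jedi's actual class:

    class Stmt(object):
        __slots__ = ('_token_list',)

    s = Stmt()
    s._token_list = []  # fine: declared in __slots__
    s.token_list = []   # AttributeError: 'Stmt' object has no attribute 'token_list'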

View File

@@ -13,7 +13,7 @@ class TokenTest(unittest.TestCase):
 def testit():
     a = "huhu"
 '''))
-        tok = parsed.module.subscopes[0].statements[0].token_list[2]
+        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
         self.assertEqual(tok.end_pos, (3, 14))

     def test_end_pos_multi_line(self):
def test_end_pos_multi_line(self): def test_end_pos_multi_line(self):
@@ -22,5 +22,5 @@ def testit():
     a = """huhu
 asdfasdf""" + "h"
 '''))
-        tok = parsed.module.subscopes[0].statements[0].token_list[2]
+        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
         self.assertEqual(tok.end_pos, (4, 11))