parser: remove current

Author: Dave Halter
Date:   2014-02-24 20:43:00 +01:00
parent 50f8b8bf0c
commit a5a6e9ac12


@@ -45,7 +45,6 @@ class Parser(object):
         # initialize global Scope
         self.module = pr.SubModule(module_path, self.start_pos, top_module)
         self._scope = self.module
-        self._current = (None, None)
 
         tokenizer = tokenizer or tokenize.source_tokens(source)
         self._gen = PushBackTokenizer(tokenizer)
@@ -70,7 +69,7 @@ class Parser(object):
             # because of `self.module.used_names`.
             d.parent = self.module
 
-        if self._current[0] in (tokenize.NEWLINE,):
+        if self._gen.current.type in (tokenize.NEWLINE,):
             # This case is only relevant with the FastTokenizer, because
             # otherwise there's always an EndMarker.
             # we added a newline before, so we need to "remove" it again.
@@ -319,16 +318,12 @@ class Parser(object):
             try:
                 # print 'parse_stmt', tok, tokenize.tok_name[token_type]
                 tok_list.append(
-                    token_pr.Token.from_tuple(
-                        self._current + (self.start_pos,)
-                    )
+                    token_pr.Token.from_tuple(self._gen.current)
                 )
                 if tok.string == 'as':
                     tok = self.next()
                     if tok.type == tokenize.NAME:
-                        n, tok = self._parse_dot_name(
-                            self._current
-                        )
+                        n, tok = self._parse_dot_name(self._gen.current)
                         if n:
                             set_vars.append(n)
                             as_names.append(n)
@@ -339,7 +334,7 @@ class Parser(object):
                 if tok.string == 'lambda':
                     breaks.discard(':')
                 elif tok.type == tokenize.NAME:
-                    n, tok = self._parse_dot_name(self._current)
+                    n, tok = self._parse_dot_name(self._gen.current)
                     # removed last entry, because we add Name
                     tok_list.pop()
                     if n:
@@ -399,13 +394,13 @@ class Parser(object):
     def __next__(self):
         """ Generate the next tokenize pattern. """
         #typ, tok, start_pos, end_pos = next(self._gen)
-        self._current = next(self._gen)
+        _current = next(self._gen)
         # dedents shouldn't change positions
-        self.start_pos = self._current.start
-        self.end_pos = self._current.end
+        self.start_pos = _current.start
+        self.end_pos = _current.end
         #self._current = typ, tok
-        return self._current
+        return _current
 
     def _parse(self):
         """
@@ -490,7 +485,7 @@ class Parser(object):
                     break
                 relative_count += 1
             # the from import
-            mod, tok = self._parse_dot_name(self._current)
+            mod, tok = self._parse_dot_name(self._gen.current)
             tok_str = tok.string
             if str(mod) == 'import' and relative_count:
                 self._gen.push_last_back()
@@ -587,7 +582,7 @@ class Parser(object):
                 debug.warning('return in non-function')
             # globals
             elif tok_str == 'global':
-                stmt, tok = self._parse_statement(self._current)
+                stmt, tok = self._parse_statement(self._gen.current)
                 if stmt:
                     self._scope.add_statement(stmt)
                     for t in stmt.token_list:
@@ -614,7 +609,7 @@ class Parser(object):
                 # this is the main part - a name can be a function or a
                 # normal var, which can follow anything. but this is done
                 # by the statement parser.
-                stmt, tok = self._parse_statement(self._current)
+                stmt, tok = self._parse_statement(self._gen.current)
                 if stmt:
                     self._scope.add_statement(stmt)
                 self.freshscope = False
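
For context, the change assumes that the tokenizer wrapper already remembers the last token it yielded, so the parser can drop its own `self._current` cache and read `self._gen.current` instead. Below is a minimal sketch of such a wrapper; the token shape and the internals are illustrative assumptions, only the names `PushBackTokenizer`, `current` and `push_last_back()` come from the diff itself.

    from collections import namedtuple

    # Assumed token shape; the real tokenizer in this codebase may differ.
    Token = namedtuple('Token', ['type', 'string', 'start', 'end'])


    class PushBackTokenizer(object):
        """Sketch: keeps the last yielded token in ``current`` so callers
        such as the parser no longer need to cache it themselves."""

        def __init__(self, tokenizer):
            self._tokenizer = tokenizer
            self._pushed = []
            self.current = None

        def push_last_back(self):
            # Re-queue the most recent token; the next call returns it again.
            self._pushed.append(self.current)

        def __next__(self):
            if self._pushed:
                self.current = self._pushed.pop()
            else:
                self.current = next(self._tokenizer)
            return self.current

        next = __next__  # Python 2 compatibility, matching the era of this commit

        def __iter__(self):
            return self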