if tokenize died, the wrong tokenizer was created again

This commit is contained in:
David Halter
2012-09-13 02:29:17 +02:00
parent 1a4de1bc68
commit 77839b0a7d


@@ -1697,5 +1697,5 @@ class PyFuzzyParser(object):
 self._tokenize_end_pos = (0, 0)
 debug.warning('indentation error on line %s, ignoring it' %
               (self.start_pos[0]))
-self.gen = tokenize.generate_tokens(buf.readline)
+self.gen = PushBackIterator(tokenize_func(buf.readline))
 return self.module
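
The fix matters because the rest of the parser expects a tokenizer that supports pushing tokens back; recreating the generator after an indentation error with the bare `tokenize.generate_tokens` lost that capability. The real `PushBackIterator` and `tokenize_func` live elsewhere in the codebase and are not shown in this hunk, so the following is only a minimal sketch of what such a push-back wrapper typically looks like, with the class body and usage being assumptions for illustration:

```python
import tokenize
from io import StringIO


class PushBackIterator:
    """Iterator wrapper that lets callers push items back onto the stream.

    Hypothetical sketch; the actual implementation in the parser may differ.
    """

    def __init__(self, iterator):
        self._iterator = iterator
        self._pushed = []

    def push_back(self, item):
        # Pushed items are yielded again before the underlying iterator resumes.
        self._pushed.append(item)

    def __iter__(self):
        return self

    def __next__(self):
        if self._pushed:
            return self._pushed.pop()
        return next(self._iterator)


# Usage: wrap the token generator the same way the corrected line does,
# so the recreated stream keeps push-back behaviour.
buf = StringIO("x = 1\n")
gen = PushBackIterator(tokenize.generate_tokens(buf.readline))
first = next(gen)
gen.push_back(first)
assert next(gen) is first
```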