remove NoErrorTokenizer dependency for all but the fast parsers

Dave Halter
2014-02-24 10:31:15 +01:00
parent 553ff66c8b
commit 9257062910
3 changed files with 25 additions and 10 deletions
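In short: the plain Parser no longer builds a NoErrorTokenizer itself. It falls back to tokenize.source_tokens(source) unless a tokenizer is injected, which is how the fast parsers keep their error-tolerant tokenizer. A minimal, self-contained sketch of that pattern (the generate_tokens-based body of source_tokens is an assumption, not jedi's actual implementation):

import io
import tokenize as std_tokenize

def source_tokens(source):
    # Assumed shape of the new default tokenizer: a plain token
    # generator over the source, with no error recovery.
    return std_tokenize.generate_tokens(io.StringIO(source).readline)

class Parser:
    def __init__(self, source, tokenizer=None):
        # Fast parsers can still inject their own (error-tolerant)
        # tokenizer; everyone else gets the default one.
        self._gen = tokenizer or source_tokens(source)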

View File

@@ -381,7 +381,7 @@ def get_names_of_scope(evaluator, scope, position=None, star_search=True, includ
 ... '''))
 >>> scope = parser.module.subscopes[0]
 >>> scope
-<Function: func@3-4>
+<Function: func@3-5>
 `get_names_of_scope` is a generator. First it yields names from the
 innermost scope.
@@ -389,13 +389,13 @@ def get_names_of_scope(evaluator, scope, position=None, star_search=True, includ
 >>> from jedi.evaluate import Evaluator
 >>> pairs = list(get_names_of_scope(Evaluator(), scope))
 >>> pairs[0]
-(<Function: func@3-4>, [<Name: y@4,4>])
+(<Function: func@3-5>, [<Name: y@4,4>])
 Then it yields the names from one scope level further out. For this
 example, this is the outermost scope.
 >>> pairs[1]
-(<SubModule: None@1-4>, [<Name: x@2,0>, <Name: func@3,4>])
+(<SubModule: None@1-5>, [<Name: x@2,0>, <Name: func@3,4>])
 Finally, it yields names from the builtins, if `include_builtin` is
 true (default).
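The repr changes above follow from the new default tokenizer: a scope's end_pos is now taken from the end marker, which sits on the line after the last statement. A self-contained illustration with the stdlib tokenizer, using a source shaped like the doctest input (the exact docstring text is an assumption, mirrored from the test added below):

import io
import tokenize

source = "\nx = ['a', 'b', 'c']\ndef func():\n    y = None\n"
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
# The ENDMARKER starts on line 5, one past the function body, which is
# why func@3-4 becomes func@3-5 and None@1-4 becomes None@1-5.
print(tokens[-1].start)   # -> (5, 0)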

View File

@@ -47,8 +47,7 @@ class Parser(object):
         self._scope = self.module
         self._current = (None, None)
 
-        tokenizer = tokenizer or tokenize.NoErrorTokenizer(source)
-        tokenizer = tokenize.NoErrorTokenizer(source, offset, is_fast)
+        tokenizer = tokenizer or tokenize.source_tokens(source)
         self._gen = PushBackTokenizer(tokenizer)
         self._top_module = top_module or self.module
         try:
@@ -59,6 +58,7 @@ class Parser(object):
             # sometimes StopIteration isn't caught. Just ignore it.
+            # on finish, set end_pos correctly
             pass
         s = self._scope
         while s is not None:
             s.end_pos = self.end_pos
@@ -71,6 +71,8 @@ class Parser(object):
             d.parent = self.module
         if self._current[0] in (tokenize.NEWLINE,):
+            # This case is only relevant with the FastTokenizer, because
+            # otherwise there's always an EndMarker.
             # we added a newline before, so we need to "remove" it again.
             self.end_pos = self._gen.previous[2]
@@ -626,7 +628,7 @@ class PushBackTokenizer(object):
     def __init__(self, tokenizer):
         self._tokenizer = tokenizer
         self._push_backs = []
-        self.current = [None, None, (0, 0), (0, 0), '']
+        self.current = tokenize.TokenInfo(None, None, (0, 0), (0, 0))
 
     def push_last_back(self):
         self._push_backs.append(self.current)
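The placeholder for current changes from a bare five-element list to a tokenize.TokenInfo built from four values. A hedged sketch of a namedtuple with that shape, which keeps index access (as in self._gen.previous[2] above) working alongside attribute access; the field names are assumptions, not jedi's actual definition:

from collections import namedtuple

# Assumed four-field layout matching TokenInfo(None, None, (0, 0), (0, 0));
# index 2 is the start position, consistent with `previous[2]` above.
TokenInfo = namedtuple('TokenInfo', ['type', 'string', 'start_pos', 'end_pos'])

placeholder = TokenInfo(None, None, (0, 0), (0, 0))
assert placeholder[2] == placeholder.start_pos == (0, 0)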

View File

@@ -2,6 +2,7 @@ from jedi._compatibility import u
 from jedi.parser import Parser
 from jedi.parser.user_context import UserContextParser
 from jedi.parser import representation as pr
+from textwrap import dedent
 
 
 def test_user_statement_on_import():
@@ -95,3 +96,15 @@ def test_module():
     assert str(name) == ''
     assert name.start_pos == (0, 0)
     assert name.end_pos == (0, 0)
+
+
+def test_end_pos():
+    s = u(dedent('''
+    x = ['a', 'b', 'c']
+    def func():
+        y = None
+    '''))
+    parser = Parser(s)
+    scope = parser.module.subscopes[0]
+    assert scope.start_pos == (3, 0)
+    assert scope.end_pos == (5, 0)
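A companion check, not part of the commit, tying the new test back to the doctest changes: with the same source, the module scope should now also end on line 5, matching <SubModule: None@1-5>. This assumes the 2014-era jedi API used in the test above:

from textwrap import dedent
from jedi._compatibility import u
from jedi.parser import Parser

s = u(dedent('''
x = ['a', 'b', 'c']
def func():
    y = None
'''))
parser = Parser(s)
# Module spans lines 1-5 once the trailing newline is accounted for.
assert parser.module.start_pos == (1, 0)
assert parser.module.end_pos[0] == 5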