Fixed issues with the last positions in the tokenizer, which were slightly broken a few commits ago.

Dave Halter
2015-04-27 21:42:40 +02:00
parent 0a96083fde
commit b6ebb2f8bf
2 changed files with 7 additions and 5 deletions


@@ -282,7 +282,9 @@ def generate_tokens(readline):
                 paren_level -= 1
             yield OP, token, spos, prefix
 
+    end_pos = (lnum, max - 1)
+    # As the last position we just take the maximally possible position. We
+    # remove -1 for the last new line.
     for indent in indents[1:]:
-        yield DEDENT, '', (lnum, max), ''
-    # As the last position we just take the max possible.
-    yield ENDMARKER, '', (lnum, max), prefix
+        yield DEDENT, '', end_pos, ''
+    yield ENDMARKER, '', end_pos, prefix
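
For context, a minimal sketch of how a caller would observe the new end position. The import path (jedi.parser.tokenize) and the plain 4-tuple token layout of (type, string, start_pos, prefix) are assumptions read off this hunk, not a documented API:

    from io import StringIO

    from jedi.parser.tokenize import generate_tokens

    source = "def foo():\n    pass\n"
    tokens = list(generate_tokens(StringIO(source).readline))

    # After this commit the trailing DEDENT/ENDMARKER tokens should report
    # (lnum, max - 1), i.e. the last column of the final line before its
    # newline, instead of a column one past it.
    for typ, string, start_pos, prefix in tokens[-3:]:
        print("%s %r at %s" % (typ, string, start_pos))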


@@ -430,11 +430,11 @@ def test_incomplete_function():
 def test_string_literals():
     """Simplified case of jedi-vim#377."""
     source = dedent("""
-    x = ur''' '''
+    x = ur'''
     def foo():
         pass
-    x""")
+    """)
     script = jedi.Script(dedent(source))
     assert script.completions()
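
The regression scenario can also be tried interactively. This only reuses jedi.Script(...).completions() and textwrap.dedent, exactly as the test does, against a hypothetical snippet containing the now-unterminated ur''' literal:

    from textwrap import dedent

    import jedi

    # Same shape as the regression test: an unterminated ur''' string
    # followed by more code, so the tokenizer has to produce sane end
    # positions for the remaining DEDENT/ENDMARKER tokens.
    source = dedent("""
    x = ur'''
    def foo():
        pass
    """)

    script = jedi.Script(source)
    print(script.completions())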