diff --git a/jedi/parser/tokenize.py b/jedi/parser/tokenize.py
index dc6aa3b4..762f630c 100644
--- a/jedi/parser/tokenize.py
+++ b/jedi/parser/tokenize.py
@@ -282,7 +282,9 @@ def generate_tokens(readline):
             paren_level -= 1
         yield OP, token, spos, prefix
 
+    end_pos = (lnum, max - 1)
+    # As the last position we just take the maximally possible position. We
+    # remove -1 for the last new line.
     for indent in indents[1:]:
-        yield DEDENT, '', (lnum, max), ''
-    # As the last position we just take the max possible.
-    yield ENDMARKER, '', (lnum, max), prefix
+        yield DEDENT, '', end_pos, ''
+    yield ENDMARKER, '', end_pos, prefix
diff --git a/test/test_parser/test_fast_parser.py b/test/test_parser/test_fast_parser.py
index ef00189b..4d1f5f22 100644
--- a/test/test_parser/test_fast_parser.py
+++ b/test/test_parser/test_fast_parser.py
@@ -430,11 +430,11 @@ def test_incomplete_function():
 
 
 def test_string_literals():
     """Simplified case of jedi-vim#377."""
     source = dedent("""
-    x = ur''' '''
+    x = ur'''
     def foo():
         pass
-    x""")
+    """)
     script = jedi.Script(dedent(source))
     assert script.completions()