
Fix strange issues with Python's std lib tokenizer; the bug might be in there as well (not sure, because I modified a lot). Fixes #449

Dave Halter
2014-08-04 16:47:36 +02:00
parent b2b4827ce3
commit 54dce0e3b2
2 changed files with 8 additions and 2 deletions


@@ -282,8 +282,8 @@ def generate_tokens(readline, line_offset=0):
                 token[:3] in single_quoted:
             if token[-1] == '\n':  # continued string
                 contstr_start = lnum, start
-                endprog = (endprogs[initial] or endprogs[token[1]] or
-                           endprogs[token[2]])
+                endprog = (endprogs.get(initial) or endprogs.get(token[1])
+                           or endprogs.get(token[2]))
                 contstr = line[start:]
                 contline = line
                 break
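The change replaces subscript lookups into the endprogs table with chained dict.get() calls, so a key that is missing from the table no longer raises a KeyError while the continued-string regex is being selected; .get() returns None and the `or` chain simply falls through to the next candidate. A minimal sketch of the difference, assuming a reduced stand-in table (not jedi's actual endprogs):

# Toy table mapping quote characters to "rest of the string" regexes.
# This is a stand-in for illustration, not jedi's real endprogs dict.
import re

endprogs = {
    "'": re.compile(r"[^'\\]*(?:\\.[^'\\]*)*'"),   # end of a single-quoted string
    '"': re.compile(r'[^"\\]*(?:\\.[^"\\]*)*"'),   # end of a double-quoted string
}

token = "u'foo\\"     # a prefixed string literal continued onto the next line
initial = token[0]    # 'u' -- not a key in this reduced table

try:
    # old-style lookup: indexing raises as soon as one candidate key is missing
    endprog = endprogs[initial] or endprogs[token[1]] or endprogs[token[2]]
except KeyError as exc:
    print('old lookup fails:', repr(exc))          # KeyError: 'u'

# fixed lookup: missing keys yield None, so the chain falls through to token[1]
endprog = (endprogs.get(initial) or endprogs.get(token[1])
           or endprogs.get(token[2]))
print('fixed lookup picks:', endprog.pattern)      # the single-quote pattern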


@@ -21,3 +21,9 @@ asdfasdf""" + "h"
         '''))
         tok = parsed.module.subscopes[0].statements[0]._token_list[2]
         self.assertEqual(tok.end_pos, (4, 11))
+
+
+def test_tokenizer_with_string_literal_backslash():
+    import jedi
+    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
+    assert c[0]._definition.obj == 'foo'
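For reference, the test's input, written out as actual source text, is a u''-prefixed single-quoted literal continued across two physical lines with a backslash inside the quotes, and it evaluates to 'foo'. That is the continued-string branch patched above. A small sketch using CPython's standard tokenize module (not jedi's fork) shows the same case, with the whole literal coming back as a single STRING token:

# Show the source the test feeds to jedi.Script and tokenize it with the
# standard library tokenizer for comparison (illustration only).
import io
import tokenize

source = "statement = u'foo\\\n'; statement"
print(source)
# statement = u'foo\
# '; statement

for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.type == tokenize.STRING:
        print(repr(tok.string))   # the whole two-line literal as one token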