forked from VimPlug/jedi
fix strange tokenizer issues; they might be in Python's std lib tokenizer as well (not sure, because I modified a lot). fixes #449
@@ -282,8 +282,8 @@ def generate_tokens(readline, line_offset=0):
                 token[:3] in single_quoted:
             if token[-1] == '\n':  # continued string
                 contstr_start = lnum, start
-                endprog = (endprogs[initial] or endprogs[token[1]] or
-                           endprogs[token[2]])
+                endprog = (endprogs.get(initial) or endprogs.get(token[1])
+                           or endprogs.get(token[2]))
                 contstr = line[start:]
                 contline = line
                 break
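Note (not part of the commit): the hunk above swaps item access for dict.get, so a string prefix that is missing from the endprogs table no longer raises KeyError but lets the `or` chain fall through to the quote characters. A minimal sketch of that behaviour follows; the endprogs contents here are invented for illustration and do not reflect jedi's actual table.

import re

# Hypothetical stand-in for the tokenizer's endprogs table: only the bare
# quote characters are present, string prefixes such as 'u' are not.
endprogs = {"'": re.compile(r".*'"), '"': re.compile(r'.*"')}

token = "u'foo\\\n"   # a single-quoted string token continued onto the next line
initial = token[0]    # 'u' -- not a key in this table

# Old code: endprogs[initial] raises KeyError('u') before the fallbacks run.
# New code: .get() returns None, so the `or` chain tries the quote characters.
endprog = (endprogs.get(initial) or endprogs.get(token[1])
           or endprogs.get(token[2]))
assert endprog.pattern == ".*'"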
@@ -21,3 +21,9 @@ asdfasdf""" + "h"
         '''))
         tok = parsed.module.subscopes[0].statements[0]._token_list[2]
         self.assertEqual(tok.end_pos, (4, 11))
+
+
+def test_tokenizer_with_string_literal_backslash():
+    import jedi
+    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
+    assert c[0]._definition.obj == 'foo'
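Note on the new test (an illustrative aside, not part of the commit): the "\\\n" escape puts a literal backslash followed by a newline inside the string, so the first source line ends in the middle of a single-quoted literal and forces the tokenizer through the continued-string branch patched above. The sketch below spells out the test's input under that assumption.

import ast

# The source the test hands to jedi.Script, written out explicitly:
source = "statement = u'foo\\\n'; statement"
print(source)
# statement = u'foo\
# '; statement

# The backslash-newline is removed from the literal's value, which is why
# the test expects the found definition to be the plain string 'foo'.
assert ast.literal_eval("u'foo\\\n'") == 'foo'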