Fix a few tokenize tests and merge them back together.

Dave Halter
2014-11-26 16:09:28 +01:00
parent f43c371467
commit cc1098b93c
4 changed files with 12 additions and 35 deletions

@@ -1,29 +0,0 @@
-from jedi._compatibility import u
-from jedi import parser
-
-from ..helpers import unittest
-
-
-class TokenTest(unittest.TestCase):
-    def test_end_pos_one_line(self):
-        parsed = parser.Parser(u('''
-def testit():
-    a = "huhu"
-'''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
-        self.assertEqual(tok.end_pos, (3, 14))
-
-    def test_end_pos_multi_line(self):
-        parsed = parser.Parser(u('''
-def testit():
-    a = """huhu
-asdfasdf""" + "h"
-'''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
-        self.assertEqual(tok.end_pos, (4, 11))
-
-
-def test_tokenizer_with_string_literal_backslash():
-    import jedi
-    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
-    assert c[0]._name.parent.obj == 'foo'

@@ -9,20 +9,20 @@ from ..helpers import unittest
 class TokenTest(unittest.TestCase):
     def test_end_pos_one_line(self):
-        parsed = parser.Parser(u('''
+        parsed = parser.Parser(parser.load_grammar(), u('''
 def testit():
     a = "huhu"
 '''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
+        tok = parsed.module.subscopes[0].statements[0].children[2]
         self.assertEqual(tok.end_pos, (3, 14))

     def test_end_pos_multi_line(self):
-        parsed = parser.Parser(u('''
+        parsed = parser.Parser(parser.load_grammar(), u('''
 def testit():
     a = """huhu
 asdfasdf""" + "h"
 '''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
+        tok = parsed.module.subscopes[0].statements[0].children[2].children[0]
         self.assertEqual(tok.end_pos, (4, 11))

     def test_simple_no_whitespace(self):
@@ -73,3 +73,9 @@ asdfasdf""" + "h"
                 self.assertEqual(t._preceding_whitespace, ' ')
             if t.string == 'if':
                 self.assertEqual(t._preceding_whitespace, ' ')
+
+
+def test_tokenizer_with_string_literal_backslash():
+    import jedi
+    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
+    assert c[0]._name.parent.obj == 'foo'
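
The pattern behind these edits: jedi's internal parser now takes a grammar object as the first argument to parser.Parser, and parse-tree nodes are reached through children rather than the old _token_list. A rough, non-authoritative sketch of that usage, assembled only from the calls visible in this diff and assuming a jedi checkout from around this commit (an internal API, not a stable public one):

from jedi._compatibility import u
from jedi import parser

# Parser now expects a grammar first (see the updated tests above).
grammar = parser.load_grammar()
parsed = parser.Parser(grammar, u('''
def testit():
    a = "huhu"
'''))

# The `a = "huhu"` assignment inside testit(); `children` replaces `_token_list`.
stmt = parsed.module.subscopes[0].statements[0]
print(stmt.children[2].end_pos)  # the tests expect (3, 14) here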