Fix a few tokenize tests and merge them back together.
@@ -33,7 +33,7 @@ STATEMENT_KEYWORDS = 'assert', 'del', 'global', 'nonlocal', 'raise', \
 _loaded_grammars = {}
 
 
-def load_grammar(file):
+def load_grammar(file='grammar3.4'):
     global _loaded_grammars
     path = os.path.join(os.path.dirname(__file__), file) + '.txt'
     try:
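The hunk above gives load_grammar a default grammar file, so most callers no longer need to name one. A minimal sketch of the resulting call sites (the claim that both calls share one object is an assumption based on the _loaded_grammars cache visible above):

from jedi import parser

# Explicit grammar name, as every caller had to write before this change:
g1 = parser.load_grammar('grammar3.4')

# New: omitting the argument falls back to the 'grammar3.4' default.
g2 = parser.load_grammar()

# Assumption: load_grammar caches by resolved path in _loaded_grammars,
# so repeated calls should hand back the same grammar object.
assert g1 is g2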
@@ -16,7 +16,7 @@ import difflib
 
 from jedi import common
 from jedi.evaluate import helpers
-from jedi.parser import representation as pr
+from jedi.parser import tree as pt
 
 
 class Refactoring(object):
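The import swap above follows the parser module rename from jedi.parser.representation to jedi.parser.tree. A hedged sketch of how code in this module would track the new alias, using a hypothetical helper (the assumption is that class names such as Module survived the rename):

from jedi.parser import tree as pt

def is_module(node):
    # Hypothetical helper. Before this commit the check would have read
    # isinstance(node, pr.Module) under the old `representation as pr` alias.
    return isinstance(node, pt.Module)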
@@ -1,29 +0,0 @@
-from jedi._compatibility import u
-from jedi import parser
-
-from ..helpers import unittest
-
-
-class TokenTest(unittest.TestCase):
-    def test_end_pos_one_line(self):
-        parsed = parser.Parser(u('''
-def testit():
-    a = "huhu"
-'''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
-        self.assertEqual(tok.end_pos, (3, 14))
-
-    def test_end_pos_multi_line(self):
-        parsed = parser.Parser(u('''
-def testit():
-    a = """huhu
-asdfasdf""" + "h"
-'''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
-        self.assertEqual(tok.end_pos, (4, 11))
-
-
-def test_tokenizer_with_string_literal_backslash():
-    import jedi
-    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
-    assert c[0]._name.parent.obj == 'foo'
@@ -9,20 +9,20 @@ from ..helpers import unittest
 
 class TokenTest(unittest.TestCase):
     def test_end_pos_one_line(self):
-        parsed = parser.Parser(u('''
+        parsed = parser.Parser(parser.load_grammar(), u('''
 def testit():
     a = "huhu"
 '''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
+        tok = parsed.module.subscopes[0].statements[0].children[2]
         self.assertEqual(tok.end_pos, (3, 14))
 
     def test_end_pos_multi_line(self):
-        parsed = parser.Parser(u('''
+        parsed = parser.Parser(parser.load_grammar(), u('''
 def testit():
     a = """huhu
 asdfasdf""" + "h"
 '''))
-        tok = parsed.module.subscopes[0].statements[0]._token_list[2]
+        tok = parsed.module.subscopes[0].statements[0].children[2].children[0]
         self.assertEqual(tok.end_pos, (4, 11))
 
     def test_simple_no_whitespace(self):
@@ -73,3 +73,9 @@ asdfasdf""" + "h"
                 self.assertEqual(t._preceding_whitespace, ' ')
             if t.string == 'if':
                 self.assertEqual(t._preceding_whitespace, ' ')
+
+
+def test_tokenizer_with_string_literal_backslash():
+    import jedi
+    c = jedi.Script("statement = u'foo\\\n'; statement").goto_definitions()
+    assert c[0]._name.parent.obj == 'foo'
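The test changes above capture the real API shift in this commit: a statement is no longer a flat _token_list but a parse-tree node whose parts hang off children. A minimal sketch of the new traversal, mirroring the first assertion (the child layout for the assignment is inferred from these tests, not from documented API):

from jedi._compatibility import u
from jedi import parser

parsed = parser.Parser(parser.load_grammar(), u('''
def testit():
    a = "huhu"
'''))
stmt = parsed.module.subscopes[0].statements[0]

# Inferred layout for `a = "huhu"`: children is roughly
# [name 'a', operator '=', string leaf], so index 2 is the string leaf.
tok = stmt.children[2]
print(tok.end_pos)  # the merged test expects (3, 14)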
|