Mirror of https://github.com/davidhalter/jedi.git (synced 2025-12-07 22:44:27 +08:00)
Merge branch 'unicode_tokenize_fix2' of https://github.com/hatamov/jedi into dev
@@ -6,6 +6,7 @@ from textwrap import dedent
from jedi._compatibility import u, is_py3
from jedi.parser.token import NAME, OP, NEWLINE, STRING, INDENT
from jedi import parser
from token import STRING

from ..helpers import unittest

@@ -95,6 +96,24 @@ class TokenTest(unittest.TestCase):
        # They will be ignored in the parser, that's ok.
        assert unicode_token[0] == OP

    def test_quoted_strings(self):
        string_tokens = [
            'u"test"',
            'u"""test"""',
            'U"""test"""',
            "u'''test'''",
            "U'''test'''",
        ]

        for s in string_tokens:
            parsed = parser.Parser(u('''a = %s\n''' % s))
            tok_list = parsed.module.statements[0]._token_list
            self.assertEqual(len(tok_list), 3)
            tok = tok_list[2]
            self.assertIsInstance(tok, parser.tokenize.Token)
            self.assertEqual(tok.type, STRING)


def test_tokenizer_with_string_literal_backslash():
    import jedi
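
For context, the snippet below is a minimal standalone sketch of what the new test_quoted_strings test exercises, using the same parser interface that appears in the diff (parser.Parser, module.statements, and the private _token_list attribute). It is illustrative only, not part of the commit.

    # Minimal sketch, assuming the jedi parser API shown in the diff above
    # (_token_list is a private attribute of the old statement objects).
    from jedi._compatibility import u
    from jedi import parser
    from token import STRING

    # A unicode-prefixed string literal, as covered by test_quoted_strings.
    source = u('a = u"test"\n')
    parsed = parser.Parser(source)

    # The assignment statement keeps its raw tokens; the third one should be
    # the whole u"test" literal, reported with the STRING token type.
    tok_list = parsed.module.statements[0]._token_list
    assert len(tok_list) == 3
    assert tok_list[2].type == STRING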