forked from VimPlug/jedi
Merge branch 'unicode_tokenize_fix2' of https://github.com/hatamov/jedi into dev
This commit is contained in:
@@ -32,5 +32,6 @@ Albertas Agejevas (@alga)
|
||||
Savor d'Isavano (@KenetJervet) <newelevenken@163.com>
|
||||
Phillip Berndt (@phillipberndt) <phillip.berndt@gmail.com>
|
||||
Ian Lee (@IanLee1521) <IanLee1521@gmail.com>
|
||||
Farkhad Khatamov (@hatamov) <comsgn@gmail.com>
|
||||
|
||||
Note: (@user) means a GitHub username.
|
||||
|
||||
@@ -68,7 +68,7 @@ double = r'[^"\\]*(?:\\.[^"\\]*)*"'
|
||||
single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
|
||||
# Tail end of """ string.
|
||||
double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
|
||||
triple = group("[bB]?[rR]?'''", '[bB]?[rR]?"""')
|
||||
triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
|
||||
# Single-line ' or " string.
|
||||
|
||||
# Because of leftmost-then-longest match semantics, be sure to put the
|
||||
@@ -126,7 +126,7 @@ single_quoted = {}
|
||||
for t in ("'", '"',
|
||||
"r'", 'r"', "R'", 'R"',
|
||||
"b'", 'b"', "B'", 'B"',
|
||||
"u'", 'u""', "U'", 'U"',
|
||||
"u'", 'u"', "U'", 'U"',
|
||||
"br'", 'br"', "Br'", 'Br"',
|
||||
"bR'", 'bR"', "BR'", 'BR"'):
|
||||
single_quoted[t] = t
|
||||
|
||||
@@ -6,6 +6,7 @@ from textwrap import dedent
|
||||
from jedi._compatibility import u, is_py3
|
||||
from jedi.parser.token import NAME, OP, NEWLINE, STRING, INDENT
|
||||
from jedi import parser
|
||||
from token import STRING
|
||||
|
||||
|
||||
from ..helpers import unittest
|
||||
@@ -95,6 +96,24 @@ class TokenTest(unittest.TestCase):
|
||||
# They will be ignored in the parser, that's ok.
|
||||
assert unicode_token[0] == OP
|
||||
|
||||
def test_quoted_strings(self):
    """Check that u/U-prefixed string literals tokenize as a single STRING.

    Regression test for the unicode string-prefix fix in the tokenizer
    regexes: each literal below must survive parsing as one STRING token.
    """
    cases = (
        'u"test"',
        'u"""test"""',
        'U"""test"""',
        "u'''test'''",
        "U'''test'''",
    )
    for literal in cases:
        # Parse a minimal assignment using the literal under test.
        parsed = parser.Parser(u('''a = %s\n''' % literal))
        tokens = parsed.module.statements[0]._token_list
        # NAME, OP, STRING — exactly three tokens for "a = <literal>".
        self.assertEqual(len(tokens), 3)
        string_tok = tokens[2]
        self.assertIsInstance(string_tok, parser.tokenize.Token)
        self.assertEqual(string_tok.type, STRING)
|
||||
|
||||
|
||||
def test_tokenizer_with_string_literal_backslash():
|
||||
import jedi
|
||||
|
||||
Reference in New Issue
Block a user