token, tokenize: update for py313 (#11966)

Author: Shantanu
Date: 2024-05-18 14:21:29 -07:00
Committed by: GitHub
Parent: ac38fce019
Commit: 69dc8e291a

2 changed files with 15 additions and 7 deletions

stdlib/tokenize.pyi

@@ -10,10 +10,8 @@ from typing_extensions import TypeAlias
 __all__ = [
     "AMPER",
     "AMPEREQUAL",
-    "ASYNC",
     "AT",
     "ATEQUAL",
-    "AWAIT",
     "CIRCUMFLEX",
     "CIRCUMFLEXEQUAL",
     "COLON",
@@ -83,6 +81,8 @@ __all__ = [
     "tokenize",
     "untokenize",
 ]
+if sys.version_info < (3, 13):
+    __all__ += ["ASYNC", "AWAIT"]
 
 if sys.version_info >= (3, 10):
     __all__ += ["SOFT_KEYWORD"]
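Python 3.13 removed the ASYNC and AWAIT token constants from token (and hence from tokenize, which re-exports them), so the stub now gates those names by version. A minimal runtime sketch of what the gate mirrors:

    import sys
    import tokenize

    if sys.version_info < (3, 13):
        # The constants still exist through Python 3.12
        print(tokenize.ASYNC, tokenize.AWAIT)
    else:
        # On 3.13+ the names are gone from both token and tokenize
        assert not hasattr(tokenize, "ASYNC")
        assert not hasattr(tokenize, "AWAIT")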
@@ -90,6 +90,9 @@ if sys.version_info >= (3, 10):
 if sys.version_info >= (3, 12):
     __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START"]
+if sys.version_info >= (3, 13):
+    __all__ += ["TokenError", "open"]
 
 cookie_re: Pattern[str]
 blank_re: Pattern[bytes]
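tokenize.TokenError and tokenize.open have existed at runtime for many releases; Python 3.13 only adds them to tokenize.__all__, which this hunk mirrors in the stub. A small sketch, where "example.py" is a hypothetical path:

    import tokenize

    # tokenize.open() opens a source file read-only and decodes it using
    # the encoding declared by its PEP 263 coding cookie (UTF-8 by default).
    with tokenize.open("example.py") as f:
        source = f.read()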
@@ -110,7 +113,9 @@ class TokenInfo(_TokenInfo):
 _Token: TypeAlias = TokenInfo | Sequence[int | str | _Position]
 
 class TokenError(Exception): ...
-class StopTokenizing(Exception): ...  # undocumented
+
+if sys.version_info < (3, 13):
+    class StopTokenizing(Exception): ...  # undocumented
 
 class Untokenizer:
     tokens: list[str]
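StopTokenizing is an undocumented internal exception that CPython dropped in 3.13, hence the new gate. Code that relied on it would need a matching guard, sketched here:

    import sys

    if sys.version_info < (3, 13):
        # Undocumented helper; no longer importable on 3.13+
        from tokenize import StopTokenizing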
@@ -120,6 +125,8 @@ class Untokenizer:
     def add_whitespace(self, start: _Position) -> None: ...
     def untokenize(self, iterable: Iterable[_Token]) -> str: ...
     def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ...
+    if sys.version_info >= (3, 13):
+        def escape_brackets(self, token: str) -> str: ...
 
 # the docstring says "returns bytes" but is incorrect --
 # if the ENCODING token is missing, it skips the encode
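escape_brackets() is new in 3.13; it re-escapes literal braces in FSTRING_MIDDLE tokens so that untokenize() can reproduce valid f-string source. A hedged round-trip sketch (the input string is arbitrary):

    import io
    import sys
    import tokenize

    if sys.version_info >= (3, 13):
        source = 'f"{x} and {{literal braces}}"\n'  # arbitrary example
        tokens = tokenize.generate_tokens(io.StringIO(source).readline)
        rebuilt = tokenize.untokenize(tokens)
        # escape_brackets doubles the literal braces again during
        # untokenization, so the rebuilt text still compiles.
        compile(rebuilt, "<roundtrip>", "exec")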