diff --git a/stdlib/token.pyi b/stdlib/token.pyi
index f1fec7698..668987d7c 100644
--- a/stdlib/token.pyi
+++ b/stdlib/token.pyi
@@ -3,10 +3,8 @@ import sys
 __all__ = [
     "AMPER",
     "AMPEREQUAL",
-    "ASYNC",
     "AT",
     "ATEQUAL",
-    "AWAIT",
     "CIRCUMFLEX",
     "CIRCUMFLEXEQUAL",
     "COLON",
@@ -71,6 +69,8 @@ __all__ = [
     "NL",
     "COMMENT",
 ]
+if sys.version_info < (3, 13):
+    __all__ += ["ASYNC", "AWAIT"]
 
 if sys.version_info >= (3, 10):
     __all__ += ["SOFT_KEYWORD"]
@@ -131,8 +131,9 @@ AT: int
 RARROW: int
 ELLIPSIS: int
 ATEQUAL: int
-AWAIT: int
-ASYNC: int
+if sys.version_info < (3, 13):
+    AWAIT: int
+    ASYNC: int
 OP: int
 ERRORTOKEN: int
 N_TOKENS: int
diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 3cd9ab8f8..a13b2d4b0 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -10,10 +10,8 @@ from typing_extensions import TypeAlias
 __all__ = [
     "AMPER",
     "AMPEREQUAL",
-    "ASYNC",
     "AT",
     "ATEQUAL",
-    "AWAIT",
     "CIRCUMFLEX",
     "CIRCUMFLEXEQUAL",
     "COLON",
@@ -83,6 +81,8 @@ __all__ = [
     "tokenize",
     "untokenize",
 ]
+if sys.version_info < (3, 13):
+    __all__ += ["ASYNC", "AWAIT"]
 
 if sys.version_info >= (3, 10):
     __all__ += ["SOFT_KEYWORD"]
@@ -90,6 +90,9 @@ if sys.version_info >= (3, 10):
 if sys.version_info >= (3, 12):
     __all__ += ["EXCLAMATION", "FSTRING_END", "FSTRING_MIDDLE", "FSTRING_START"]
 
+if sys.version_info >= (3, 13):
+    __all__ += ["TokenError", "open"]
+
 cookie_re: Pattern[str]
 blank_re: Pattern[bytes]
 
@@ -110,7 +113,9 @@ class TokenInfo(_TokenInfo):
 _Token: TypeAlias = TokenInfo | Sequence[int | str | _Position]
 
 class TokenError(Exception): ...
-class StopTokenizing(Exception): ...  # undocumented
+
+if sys.version_info < (3, 13):
+    class StopTokenizing(Exception): ...  # undocumented
 
 class Untokenizer:
     tokens: list[str]
@@ -120,6 +125,8 @@ class Untokenizer:
     def add_whitespace(self, start: _Position) -> None: ...
     def untokenize(self, iterable: Iterable[_Token]) -> str: ...
     def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ...
+    if sys.version_info >= (3, 13):
+        def escape_brackets(self, token: str) -> str: ...
 
 # the docstring says "returns bytes" but is incorrect --
 # if the ENCODING token is missing, it skips the encode
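
As a sanity check, here is a minimal sketch (not part of the patch) of the runtime behaviour these version gates encode. It assumes only what the stubs above state: token.ASYNC, token.AWAIT, and tokenize.StopTokenizing exist only before 3.13, while 3.13 adds "TokenError" and "open" to tokenize.__all__ and an escape_brackets() method to Untokenizer:

    import sys
    import token
    import tokenize

    if sys.version_info < (3, 13):
        # The dedicated token numbers still exist on 3.12 and earlier; they
        # were kept for backward compatibility after async/await became
        # keywords and the tokenizer stopped emitting them.
        print(token.ASYNC, token.AWAIT)
    else:
        # On 3.13+ the constants are gone entirely...
        print(hasattr(token, "ASYNC"), hasattr(token, "AWAIT"))  # False False
        # ...and tokenize now advertises TokenError and open in __all__.
        print("TokenError" in tokenize.__all__, "open" in tokenize.__all__)
        print(hasattr(tokenize.Untokenizer(), "escape_brackets"))  # True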