Use PEP 585 syntax in Python 2, protobuf & _ast stubs, where possible (#6949)

Alex Waygood
2022-01-18 15:14:03 +00:00
committed by GitHub
parent aa885ecd65
commit 8af5e0d340
264 changed files with 2217 additions and 2411 deletions
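
For context: PEP 585 lets the builtin container types be subscripted as generics, so typing.Dict, typing.List, and typing.Tuple can be dropped from imports wherever only their subscripted forms are used. Because stub files are never executed, this syntax is valid even in stubs for Python 2. A minimal before/after sketch of the pattern, reusing names from the diff below (the file appears to be a Python 2 tokenize stub; the filename itself is not preserved here):

    # Before: generic aliases imported from typing
    from typing import Any, Dict, Tuple
    endprogs: Dict[str, Any]
    _Pos = Tuple[int, int]

    # After (PEP 585): builtin types subscripted directly
    from typing import Any
    endprogs: dict[str, Any]
    _Pos = tuple[int, int]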


@@ -1,4 +1,4 @@
-from typing import Any, Callable, Dict, Generator, Iterable, Iterator, List, Tuple
+from typing import Any, Callable, Generator, Iterable, Iterator
 
 __author__: str
 __credits__: str
@@ -97,27 +97,27 @@ VBAREQUAL: int
 Whitespace: str
 chain: type
 double3prog: type
-endprogs: Dict[str, Any]
+endprogs: dict[str, Any]
 pseudoprog: type
 single3prog: type
-single_quoted: Dict[str, str]
+single_quoted: dict[str, str]
 t: str
 tabsize: int
-tok_name: Dict[int, str]
+tok_name: dict[int, str]
 tokenprog: type
-triple_quoted: Dict[str, str]
+triple_quoted: dict[str, str]
 x: str
 
-_Pos = Tuple[int, int]
-_TokenType = Tuple[int, str, _Pos, _Pos, str]
+_Pos = tuple[int, int]
+_TokenType = tuple[int, str, _Pos, _Pos, str]
 
 def any(*args, **kwargs) -> str: ...
 def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ...
 def group(*args: str) -> str: ...
 def maybe(*args: str) -> str: ...
 def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ...
-def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
-def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def tokenize(readline: Callable[[], str], tokeneater: Callable[[tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
 def untokenize(iterable: Iterable[_TokenType]) -> str: ...
 
 class StopTokenizing(Exception): ...
@@ -126,8 +126,8 @@ class TokenError(Exception): ...
 class Untokenizer:
     prev_col: int
     prev_row: int
-    tokens: List[str]
+    tokens: list[str]
     def __init__(self) -> None: ...
     def add_whitespace(self, _Pos) -> None: ...
-    def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ...
+    def compat(self, token: tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ...
     def untokenize(self, iterable: Iterable[_TokenType]) -> str: ...
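
For reference, a sketch of how the stubbed generate_tokens API is consumed at runtime. This assumes the file above is the Python 2 stdlib tokenize stub; the StringIO import exists only to fabricate a readline callable and is not part of the commit:

    from StringIO import StringIO
    import tokenize

    # Each yielded item matches _TokenType:
    # (token type, token string, start _Pos, end _Pos, physical line)
    source = StringIO("x = 1\n")
    for tok_type, tok_string, start, end, line in tokenize.generate_tokens(source.readline):
        print tokenize.tok_name[tok_type], repr(tok_string)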