Remove outdated comment regarding tokenize.generate_tokens (#12748)

This commit is contained in:
Tomas R.
2024-10-06 15:54:33 +02:00
committed by GitHub
parent 0aa5186826
commit 04f3f77ee5

View File

@@ -133,7 +133,7 @@ class Untokenizer:
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
-def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented
+def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
 def open(filename: FileDescriptorOrPath) -> TextIO: ...
 def group(*choices: str) -> str: ... # undocumented
 def any(*choices: str) -> str: ... # undocumented