From 04f3f77ee5d57813ef790e9244f7ba4e30c9f01f Mon Sep 17 00:00:00 2001
From: "Tomas R."
Date: Sun, 6 Oct 2024 15:54:33 +0200
Subject: [PATCH] Remove outdated comment regarding `tokenize.generate_tokens`
 (#12748)

---
 stdlib/tokenize.pyi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 3d2a93865..e1c8fedee 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -133,7 +133,7 @@ class Untokenizer:
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
-def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...  # undocumented
+def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
 def open(filename: FileDescriptorOrPath) -> TextIO: ...
 def group(*choices: str) -> str: ...  # undocumented
 def any(*choices: str) -> str: ...  # undocumented
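
Note: the `# undocumented` marker being removed here was outdated because `generate_tokens()` is covered by the CPython documentation for the `tokenize` module. A minimal usage sketch of the stub's signature follows; it is not part of the patch, just an illustration. As the annotation shows, `generate_tokens()` takes a readline callable returning str, unlike `tokenize.tokenize()`, which consumes bytes.

import io
import tokenize

source = "x = 1 + 2\n"

# generate_tokens() expects a readline callable that returns str,
# e.g. io.StringIO(...).readline. It yields TokenInfo tuples, the
# same as tokenize.tokenize() does for a bytes readline.
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))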