diff --git a/stdlib/tokenize.pyi b/stdlib/tokenize.pyi
index 3d2a93865..e1c8fedee 100644
--- a/stdlib/tokenize.pyi
+++ b/stdlib/tokenize.pyi
@@ -133,7 +133,7 @@ class Untokenizer:
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
-def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...  # undocumented
+def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
 def open(filename: FileDescriptorOrPath) -> TextIO: ...
 def group(*choices: str) -> str: ...  # undocumented
 def any(*choices: str) -> str: ...  # undocumented
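
For context (not part of the patch): the change drops the `# undocumented` marker from `generate_tokens`, presumably because the function is now covered by the CPython `tokenize` documentation. A minimal sketch of the distinction the surrounding stubs encode, using only standard-library APIs: `tokenize.tokenize()` takes a readline callable returning bytes and detects the source encoding itself, while `tokenize.generate_tokens()` takes a readline returning str.

```python
import io
import tokenize

source = "x = 1\n"

# generate_tokens: str-based readline, per the Callable[[], str] stub
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tok.type, repr(tok.string))

# tokenize: bytes-based readline, per the Callable[[], bytes | bytearray] stub;
# the encoding is detected from the byte stream (PEP 263 coding cookie or BOM)
for tok in tokenize.tokenize(io.BytesIO(source.encode("utf-8")).readline):
    print(tok.type, repr(tok.string))
```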