diff --git a/stdlib/3/tokenize.pyi b/stdlib/3/tokenize.pyi
index b3c5fd964..60b6ec97e 100644
--- a/stdlib/3/tokenize.pyi
+++ b/stdlib/3/tokenize.pyi
@@ -41,7 +41,7 @@ class Untokenizer:
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
-def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
+def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...  # undocumented

 if sys.version_info >= (3, 6):
     from os import PathLike