From 299d89ab76f6fbd75c972a01c7c14ac41c66c245 Mon Sep 17 00:00:00 2001
From: Sebastian Rittau
Date: Wed, 16 Oct 2019 17:55:23 +0200
Subject: [PATCH] generate_tokens(readline) must return bytes (#3372)

---
 stdlib/3/tokenize.pyi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/stdlib/3/tokenize.pyi b/stdlib/3/tokenize.pyi
index b3c5fd964..60b6ec97e 100644
--- a/stdlib/3/tokenize.pyi
+++ b/stdlib/3/tokenize.pyi
@@ -41,7 +41,7 @@ class Untokenizer:
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
-def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
+def generate_tokens(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...  # undocumented

 if sys.version_info >= (3, 6):
     from os import PathLike
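
For reference, a minimal sketch (not part of the patch) of the readline-based API these stubs describe: tokenize.tokenize() is documented to take a zero-argument callable returning bytes, which is the same Callable[[], bytes] shape this change assigns to generate_tokens. The source text and variable names below are illustrative only.

# Illustrative example, assuming CPython's documented tokenize.tokenize() API.
import io
import tokenize

source = b"x = 1\nprint(x)\n"
readline = io.BytesIO(source).readline  # zero-argument callable returning bytes

# tokenize() consumes the readline callable and yields TokenInfo tuples.
for tok in tokenize.tokenize(readline):
    print(tok.type, tok.string)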