tokenize: add generate_tokens in py3 (#1449)

Fixes #1433

Although `generate_tokens` is undocumented in Python 3, it is still present and in use, and users have asked for it to be included in the stubs.
This commit is contained in:
Jelle Zijlstra
2017-07-04 19:17:39 -07:00
committed by Matthias Kramm
parent ab5f196fca
commit e980c8987b

View File

@@ -39,6 +39,7 @@ class Untokenizer:
# Reconstruct source text from a token stream (stubbed as Any: may return str or bytes).
def untokenize(iterable: Iterable[_Token]) -> Any: ...
# Detect the file encoding from a bytes-readline; returns (encoding, lines_read).
def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
# Documented py3 entry point: tokenizes a *bytes* readline (handles encoding itself).
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
# Undocumented py3 API (the line added by this commit): like tokenize(), but the
# readline callable yields *str* — no encoding detection is performed.
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
# tokenize.open(): opens a file in text mode using the encoding detect_encoding() finds.
def open(filename: Union[str, bytes, int]) -> TextIO: ...