mirror of
https://github.com/davidhalter/typeshed.git
synced 2025-12-08 04:54:47 +08:00
tokenize: add generate_tokens in py3 (#1449)
Fixes #1433. This is undocumented, but somebody is asking for it to be included.
This commit is contained in:
committed by
Matthias Kramm
parent
ab5f196fca
commit
e980c8987b
@@ -39,6 +39,7 @@ class Untokenizer:
|
||||
# Stub signature only: accepts an iterable of tokens; result typed Any here.
# NOTE(review): runtime behavior is defined by CPython's tokenize.untokenize.
def untokenize(iterable: Iterable[_Token]) -> Any: ...
|
||||
# Stub signature only: takes a no-arg callable producing bytes lines and
# returns (encoding_name, lines_read) — see CPython tokenize.detect_encoding.
def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
|
||||
# Stub signature only: readline yields *bytes* (tokenize detects the encoding
# itself); produces a generator of TokenInfo tuples.
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
|
||||
# Stub signature only: like tokenize() but readline yields *str* (already
# decoded). Undocumented in py3 at the time of this commit but present at
# runtime — added here per issue #1433.
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
|
||||
|
||||
# Stub signature only: opens a Python source file read-only, returning a
# TextIO — see CPython tokenize.open; shadows the builtin open within
# the tokenize module's namespace.
def open(filename: Union[str, bytes, int]) -> TextIO: ...
|
||||
|
||||
|
||||
Reference in New Issue
Block a user