diff --git a/stdlib/3/tokenize.pyi b/stdlib/3/tokenize.pyi
new file mode 100644
index 000000000..947c19527
--- /dev/null
+++ b/stdlib/3/tokenize.pyi
@@ -0,0 +1,99 @@
+# Stubs for tokenize (Python 3.5)
+#
+# NOTE: This dynamically typed stub was automatically generated by stubgen.
+
+from typing import Any, Union, TextIO
+from builtins import open as _builtin_open
+from token import *
+
+COMMENT = ... # type: Any
+NL = ... # type: Any
+ENCODING = ... # type: Any
+
+class TokenInfo:
+    @property
+    def exact_type(self): ...
+
+class TokenError(Exception): ...
+class StopTokenizing(Exception): ...
+
+class Untokenizer:
+    tokens = ... # type: Any
+    prev_row = ... # type: Any
+    prev_col = ... # type: Any
+    encoding = ... # type: Any
+    def __init__(self): ...
+    def add_whitespace(self, start): ...
+    def untokenize(self, iterable): ...
+    def compat(self, token, iterable): ...
+
+def untokenize(iterable): ...
+def detect_encoding(readline): ...
+def tokenize(readline): ...
+
+def open(filename: Union[str, bytes, int]) -> TextIO: ...
+
+# Names in __all__ with no definition:
+# AMPER
+# AMPEREQUAL
+# ASYNC
+# AT
+# ATEQUAL
+# AWAIT
+# CIRCUMFLEX
+# CIRCUMFLEXEQUAL
+# COLON
+# COMMA
+# DEDENT
+# DOT
+# DOUBLESLASH
+# DOUBLESLASHEQUAL
+# DOUBLESTAR
+# DOUBLESTAREQUAL
+# ELLIPSIS
+# ENDMARKER
+# EQEQUAL
+# EQUAL
+# ERRORTOKEN
+# GREATER
+# GREATEREQUAL
+# INDENT
+# ISEOF
+# ISNONTERMINAL
+# ISTERMINAL
+# LBRACE
+# LEFTSHIFT
+# LEFTSHIFTEQUAL
+# LESS
+# LESSEQUAL
+# LPAR
+# LSQB
+# MINEQUAL
+# MINUS
+# NAME
+# NEWLINE
+# NOTEQUAL
+# NT_OFFSET
+# NUMBER
+# N_TOKENS
+# OP
+# PERCENT
+# PERCENTEQUAL
+# PLUS
+# PLUSEQUAL
+# RARROW
+# RBRACE
+# RIGHTSHIFT
+# RIGHTSHIFTEQUAL
+# RPAR
+# RSQB
+# SEMI
+# SLASH
+# SLASHEQUAL
+# STAR
+# STAREQUAL
+# STRING
+# TILDE
+# VBAR
+# VBAREQUAL
+# tok_name
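For reviewer context (not part of the patch): a minimal, illustrative sketch of how the runtime API covered by this stub is typically used, so the mostly untyped signatures above can be read against real call sites. The filename "example.py" is an arbitrary placeholder.

import tokenize

# tokenize.open() opens the file read-only in text mode, decoding it with the
# encoding found by detect_encoding(); this is the one fully annotated
# signature in the stub above.
with tokenize.open("example.py") as f:
    source = f.read()

# tokenize.tokenize() takes the readline of a file opened in binary mode and
# yields TokenInfo records; the exact_type property maps generic OP tokens to
# their specific token constants.
with open("example.py", "rb") as f:
    for tok in tokenize.tokenize(f.readline):
        print(tok.exact_type, tok.string)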