# Stubs for tokenize (Python 3.5)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.

from typing import Any, Union, TextIO
from builtins import open as _builtin_open
from token import *

COMMENT = ...  # type: Any
NL = ...  # type: Any
ENCODING = ...  # type: Any
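
# COMMENT, NL and ENCODING are token type codes (plain ints at runtime); the
# Any annotations above are simply what stubgen emitted.  A minimal sketch of
# comparing them against TokenInfo.type:
#
#     import io, tokenize
#     src = b"x = 1  # a comment\n"
#     for tok in tokenize.tokenize(io.BytesIO(src).readline):
#         if tok.type == tokenize.COMMENT:
#             print(tok.string)  # -> "# a comment"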

class TokenInfo:
    @property
    def exact_type(self): ...
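
# At runtime TokenInfo is a namedtuple with the fields
# (type, string, start, end, line); the generated stub only surfaces
# exact_type, which refines OP tokens into their specific operator type:
#
#     import io, tokenize
#     toks = list(tokenize.tokenize(io.BytesIO(b"x + 1\n").readline))
#     op = [t for t in toks if t.type == tokenize.OP][0]
#     print(op.exact_type == tokenize.PLUS)  # -> True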

class TokenError(Exception): ...
class StopTokenizing(Exception): ...

class Untokenizer:
    tokens = ...  # type: Any
    prev_row = ...  # type: Any
    prev_col = ...  # type: Any
    encoding = ...  # type: Any
    def __init__(self): ...
    def add_whitespace(self, start): ...
    def untokenize(self, iterable): ...
    def compat(self, token, iterable): ...
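
# Untokenizer is the helper that drives the module-level untokenize() below:
# add_whitespace() pads the output with spaces so each token lands at its
# recorded (row, col) start position, and compat() handles iterables of bare
# (type, string) 2-tuples that carry no position information.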

def untokenize(iterable): ...
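
# A round-trip sketch: tokenize source bytes, then rebuild them.  When the
# token stream starts with an ENCODING token, untokenize() returns bytes
# encoded accordingly:
#
#     import io, tokenize
#     src = b"x = 1\n"
#     toks = list(tokenize.tokenize(io.BytesIO(src).readline))
#     print(tokenize.untokenize(toks))  # -> b'x = 1\n'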

def detect_encoding(readline): ...
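
# detect_encoding() inspects at most two lines (a PEP 263 coding cookie or a
# UTF-8 BOM) and returns (encoding, list_of_lines_it_read):
#
#     import io, tokenize
#     line = b"# -*- coding: latin-1 -*-\n"
#     enc, read = tokenize.detect_encoding(io.BytesIO(line).readline)
#     print(enc)  # -> 'iso-8859-1'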

def tokenize(readline): ...
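
# tokenize() requires a readline callable that yields *bytes* (e.g. from a
# file opened in binary mode, or an io.BytesIO); it yields TokenInfo tuples,
# beginning with an ENCODING token:
#
#     import io, tokenize
#     for tok in tokenize.tokenize(io.BytesIO(b"a + b\n").readline):
#         print(tokenize.tok_name[tok.type], repr(tok.string))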

def open(filename: Union[str, bytes, int]) -> TextIO: ...
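
# tokenize.open() opens a source file read-only in text mode, using
# detect_encoding() to pick the decoder (unlike builtins.open, aliased above
# as _builtin_open, which defaults to the locale encoding):
#
#     import tokenize
#     with tokenize.open("some_module.py") as f:  # hypothetical path
#         print(f.encoding)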

# Names in __all__ with no definition:
# AMPER
# AMPEREQUAL
# ASYNC
# AT
# ATEQUAL
# AWAIT
# CIRCUMFLEX
# CIRCUMFLEXEQUAL
# COLON
# COMMA
# DEDENT
# DOT
# DOUBLESLASH
# DOUBLESLASHEQUAL
# DOUBLESTAR
# DOUBLESTAREQUAL
# ELLIPSIS
# ENDMARKER
# EQEQUAL
# EQUAL
# ERRORTOKEN
# GREATER
# GREATEREQUAL
# INDENT
# ISEOF
# ISNONTERMINAL
# ISTERMINAL
# LBRACE
# LEFTSHIFT
# LEFTSHIFTEQUAL
# LESS
# LESSEQUAL
# LPAR
# LSQB
# MINEQUAL
# MINUS
# NAME
# NEWLINE
# NOTEQUAL
# NT_OFFSET
# NUMBER
# N_TOKENS
# OP
# PERCENT
# PERCENTEQUAL
# PLUS
# PLUSEQUAL
# RARROW
# RBRACE
# RIGHTSHIFT
# RIGHTSHIFTEQUAL
# RPAR
# RSQB
# SEMI
# SLASH
# SLASHEQUAL
# STAR
# STAREQUAL
# STRING
# TILDE
# VBAR
# VBAREQUAL
# tok_name