# typeshed/stdlib/2.7/tokenize.pyi
# Automatically generated by pytype. May contain errors.
from types import ModuleType
from typing import Any, Callable, Dict, Generator, Iterable, Iterator, List, Pattern, Tuple
__all__ = ... # type: List[str]
__author__ = ... # type: str
__credits__ = ... # type: str
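# The int-valued names below are token type codes re-exported from the token
# module; the str-valued names (Binnumber, Comment, ContStr, ..., Whitespace)
# are regular-expression source fragments that tokenize.py combines into its
# compiled token patterns.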
AMPER = ... # type: int
AMPEREQUAL = ... # type: int
AT = ... # type: int
BACKQUOTE = ... # type: int
Binnumber = ... # type: str
Bracket = ... # type: str
CIRCUMFLEX = ... # type: int
CIRCUMFLEXEQUAL = ... # type: int
COLON = ... # type: int
COMMA = ... # type: int
COMMENT = ... # type: int
Comment = ... # type: str
ContStr = ... # type: str
DEDENT = ... # type: int
DOT = ... # type: int
DOUBLESLASH = ... # type: int
DOUBLESLASHEQUAL = ... # type: int
DOUBLESTAR = ... # type: int
DOUBLESTAREQUAL = ... # type: int
Decnumber = ... # type: str
Double = ... # type: str
Double3 = ... # type: str
ENDMARKER = ... # type: int
EQEQUAL = ... # type: int
EQUAL = ... # type: int
ERRORTOKEN = ... # type: int
Expfloat = ... # type: str
Exponent = ... # type: str
Floatnumber = ... # type: str
Funny = ... # type: str
GREATER = ... # type: int
GREATEREQUAL = ... # type: int
Hexnumber = ... # type: str
INDENT = ... # type: int
ISEOF = ... # type: Callable[[int], bool]
ISNONTERMINAL = ... # type: Callable[[int], bool]
ISTERMINAL = ... # type: Callable[[int], bool]
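# These predicates are re-exported from the token module: ISTERMINAL(x) tests
# x < NT_OFFSET, ISNONTERMINAL(x) tests x >= NT_OFFSET, and ISEOF(x) tests
# x == ENDMARKER, e.g. ISTERMINAL(NAME) and ISEOF(ENDMARKER) are both True.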
Ignore = ... # type: str
Imagnumber = ... # type: str
Intnumber = ... # type: str
LBRACE = ... # type: int
LEFTSHIFT = ... # type: int
LEFTSHIFTEQUAL = ... # type: int
LESS = ... # type: int
LESSEQUAL = ... # type: int
LPAR = ... # type: int
LSQB = ... # type: int
MINEQUAL = ... # type: int
MINUS = ... # type: int
NAME = ... # type: int
NEWLINE = ... # type: int
NL = ... # type: int
NOTEQUAL = ... # type: int
NT_OFFSET = ... # type: int
NUMBER = ... # type: int
N_TOKENS = ... # type: int
Name = ... # type: str
Number = ... # type: str
OP = ... # type: int
Octnumber = ... # type: str
Operator = ... # type: str
PERCENT = ... # type: int
PERCENTEQUAL = ... # type: int
PLUS = ... # type: int
PLUSEQUAL = ... # type: int
PlainToken = ... # type: str
Pointfloat = ... # type: str
PseudoExtras = ... # type: str
PseudoToken = ... # type: str
RBRACE = ... # type: int
RIGHTSHIFT = ... # type: int
RIGHTSHIFTEQUAL = ... # type: int
RPAR = ... # type: int
RSQB = ... # type: int
SEMI = ... # type: int
SLASH = ... # type: int
SLASHEQUAL = ... # type: int
STAR = ... # type: int
STAREQUAL = ... # type: int
STRING = ... # type: int
Single = ... # type: str
Single3 = ... # type: str
Special = ... # type: str
String = ... # type: str
TILDE = ... # type: int
Token = ... # type: str
Triple = ... # type: str
VBAR = ... # type: int
VBAREQUAL = ... # type: int
Whitespace = ... # type: str
chain = ... # type: type
double3prog = ... # type: Pattern[str]
endprogs = ... # type: Dict[str, Any]
pseudoprog = ... # type: Pattern[str]
re = ... # type: ModuleType
single3prog = ... # type: Pattern[str]
single_quoted = ... # type: Dict[str, str]
string = ... # type: ModuleType
sys = ... # type: ModuleType
t = ... # type: str
tabsize = ... # type: int
tok_name = ... # type: Dict[int, str]
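# tok_name maps token type codes back to their names (e.g. tok_name[NAME] ==
# 'NAME'); printtoken uses it to label the tokens it prints.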
token = ... # type: ModuleType
tokenprog = ... # type: Pattern[str]
triple_quoted = ... # type: Dict[str, str]
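# single_quoted and triple_quoted map each recognized prefix/quote combination
# to itself (e.g. "'", 'r"', "'''", 'u"""'), so the tokenizer can test string
# openers with simple dict membership.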
x = ... # type: str
def any(*choices: str) -> str: ...
def generate_tokens(readline: Callable[[], str]) -> Generator[Tuple[int, str, Tuple[int, int], Tuple[int, int], str], None, None]: ...
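# Usage sketch (illustrative, not part of the stub): generate_tokens is driven
# by a readline callable such as a file object's readline method, and yields
# 5-tuples of (type, string, (srow, scol), (erow, ecol), logical_line):
#   with open('example.py') as f:          # 'example.py' is a made-up name
#       for tok_type, tok_str, start, end, line in generate_tokens(f.readline):
#           print tok_name[tok_type], repr(tok_str)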
def group(*choices: str) -> str: ...
def maybe(*choices: str) -> str: ...
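# group, any and maybe are the regex-building helpers tokenize.py uses to
# assemble the pattern strings above: group joins its arguments into an
# alternation, any appends '*', and maybe appends '?'.  For example,
# group('a', 'b') == '(a|b)' and maybe('x') == '(x)?'.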
def printtoken(type: int, token: str, srow_scol: Tuple[int, int], erow_ecol: Tuple[int, int], line: str) -> None: ...
def tokenize(readline: Callable[[], str], tokeneater: Callable[[int, str, Tuple[int, int], Tuple[int, int], str], None] = ...) -> None: ...
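# tokenize is the callback-style counterpart of generate_tokens: it reads input
# from readline and calls tokeneater once per token with the same five values.
# The default tokeneater is printtoken, which echoes each token to stdout.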
def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[int, str, Tuple[int, int], Tuple[int, int], str], None]) -> None: ...
def untokenize(iterable: Iterable[Tuple[Any, ...]]) -> str: ...
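# untokenize converts a token stream back to source text.  Full 5-tuples restore
# positions exactly; (type, string) 2-tuples go through Untokenizer.compat and
# only guarantee that retokenizing gives the same stream.  Sketch (assumes
# `from StringIO import StringIO` and a `source` string):
#   toks = list(generate_tokens(StringIO(source).readline))
#   assert untokenize(toks) == source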
class StopTokenizing(Exception): ...
class TokenError(Exception): ...
class Untokenizer:
    prev_col = ... # type: int
    prev_row = ... # type: int
    tokens = ... # type: List[str]
    def __init__(self) -> None: ...
    def add_whitespace(self, start: Tuple[int, int]) -> None: ...
    def compat(self, token: Tuple[int, str], iterable: Iterator[Tuple[Any, ...]]) -> None: ...
    def untokenize(self, iterable: Iterable[Tuple[Any, ...]]) -> str: ...
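# Untokenizer backs the module-level untokenize(): it accumulates text fragments
# in self.tokens and tracks prev_row/prev_col so add_whitespace can re-insert
# the gaps between consecutive tokens before joining the fragments.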