From 2bb0d98b6d2d22e729e3580776891444320711b1 Mon Sep 17 00:00:00 2001 From: Matthias Kramm Date: Tue, 22 Dec 2015 12:01:02 -0800 Subject: [PATCH] add Python 2 stubs for ast, tokenize --- stdlib/2.7/ast.pyi | 28 ++++++++ stdlib/2.7/tokenize.pyi | 141 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 169 insertions(+) create mode 100644 stdlib/2.7/ast.pyi create mode 100644 stdlib/2.7/tokenize.pyi diff --git a/stdlib/2.7/ast.pyi b/stdlib/2.7/ast.pyi new file mode 100644 index 000000000..5524d1013 --- /dev/null +++ b/stdlib/2.7/ast.pyi @@ -0,0 +1,28 @@ +# Automatically generated by pytype. May contain errors. + +from typing import Any, Tuple, Generator + +from _ast import * + +__version__ = ... # type: int +PyCF_ONLY_AST = ... # type: int + +def copy_location(new_node, old_node) -> Any: ... +def dump(node, *args, **kwargs) -> str: ... +def fix_missing_locations(node) -> Any: ... +def get_docstring(node, *args, **kwargs) -> Any: ... +def increment_lineno(node, *args, **kwargs) -> Any: ... +def iter_child_nodes(node) -> Generator[Any, Any, Any]: ... +def iter_fields(node) -> Any: ... # TODO: Generator[Tuple[Any, ...]]: ... +def literal_eval(node_or_string) -> Any: ... +def parse(source, filename, mode, *args, **kwargs) -> Any: ... +def walk(node) -> Any: ... # TODO: Generator[Any]: ... + +class NodeVisitor(object): + __doc__ = ... # type: str + def generic_visit(self, node) -> None: ... + def visit(self, node) -> Any: ... + +class NodeTransformer(NodeVisitor): + __doc__ = ... # type: str + def generic_visit(self, node) -> Any: ... diff --git a/stdlib/2.7/tokenize.pyi b/stdlib/2.7/tokenize.pyi new file mode 100644 index 000000000..0f7a5a1f6 --- /dev/null +++ b/stdlib/2.7/tokenize.pyi @@ -0,0 +1,141 @@ +# Automatically generated by pytype. May contain errors. + +from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union + +__all__ = ... # type: List[str, ...] +__author__ = ... # type: str +__credits__ = ... # type: str + +AMPER = ... 
# type: int +AMPEREQUAL = ... # type: int +AT = ... # type: int +BACKQUOTE = ... # type: int +Binnumber = ... # type: str +Bracket = ... # type: str +CIRCUMFLEX = ... # type: int +CIRCUMFLEXEQUAL = ... # type: int +COLON = ... # type: int +COMMA = ... # type: int +COMMENT = ... # type: int +Comment = ... # type: str +ContStr = ... # type: str +DEDENT = ... # type: int +DOT = ... # type: int +DOUBLESLASH = ... # type: int +DOUBLESLASHEQUAL = ... # type: int +DOUBLESTAR = ... # type: int +DOUBLESTAREQUAL = ... # type: int +Decnumber = ... # type: str +Double = ... # type: str +Double3 = ... # type: str +ENDMARKER = ... # type: int +EQEQUAL = ... # type: int +EQUAL = ... # type: int +ERRORTOKEN = ... # type: int +Expfloat = ... # type: str +Exponent = ... # type: str +Floatnumber = ... # type: str +Funny = ... # type: str +GREATER = ... # type: int +GREATEREQUAL = ... # type: int +Hexnumber = ... # type: str +INDENT = ... # type: int + +# TODO: fill in type parameters of Callable +ISEOF = ... # type: Callable +ISNONTERMINAL = ... # type: Callable +ISTERMINAL = ... # type: Callable + +Ignore = ... # type: str +Imagnumber = ... # type: str +Intnumber = ... # type: str +LBRACE = ... # type: int +LEFTSHIFT = ... # type: int +LEFTSHIFTEQUAL = ... # type: int +LESS = ... # type: int +LESSEQUAL = ... # type: int +LPAR = ... # type: int +LSQB = ... # type: int +MINEQUAL = ... # type: int +MINUS = ... # type: int +NAME = ... # type: int +NEWLINE = ... # type: int +NL = ... # type: int +NOTEQUAL = ... # type: int +NT_OFFSET = ... # type: int +NUMBER = ... # type: int +N_TOKENS = ... # type: int +Name = ... # type: str +Number = ... # type: str +OP = ... # type: int +Octnumber = ... # type: str +Operator = ... # type: str +PERCENT = ... # type: int +PERCENTEQUAL = ... # type: int +PLUS = ... # type: int +PLUSEQUAL = ... # type: int +PlainToken = ... # type: str +Pointfloat = ... # type: str +PseudoExtras = ... # type: str +PseudoToken = ... # type: str +RBRACE = ... 
# type: int +RIGHTSHIFT = ... # type: int +RIGHTSHIFTEQUAL = ... # type: int +RPAR = ... # type: int +RSQB = ... # type: int +SEMI = ... # type: int +SLASH = ... # type: int +SLASHEQUAL = ... # type: int +STAR = ... # type: int +STAREQUAL = ... # type: int +STRING = ... # type: int +Single = ... # type: str +Single3 = ... # type: str +Special = ... # type: str +String = ... # type: str +TILDE = ... # type: int +Token = ... # type: str +Triple = ... # type: str +VBAR = ... # type: int +VBAREQUAL = ... # type: int +Whitespace = ... # type: str +chain = ... # type: type +double3prog = ... # type: type +endprogs = ... # type: Dict[str, Any] +pseudoprog = ... # type: type +re = ... # type: module +single3prog = ... # type: type +single_quoted = ... # type: Dict[str, str] +string = ... # type: module +sys = ... # type: module +t = ... # type: str +tabsize = ... # type: int +tok_name = ... # type: Dict[int, str] +token = ... # type: module +tokenprog = ... # type: type +triple_quoted = ... # type: Dict[str, str] +x = ... # type: str + +def any(*args, **kwargs) -> str: ... +def generate_tokens(readline: Callable) -> Generator[Tuple[Any, ...], None, None]: ... +def group(*args, **kwargs) -> str: ... +def maybe(*args, **kwargs) -> str: ... +def printtoken(type, token, srow_scol, erow_ecol, line) -> None: ... +def tokenize(readline: Callable, *args, **kwargs) -> None: ... +def tokenize_loop(readline: Callable, tokeneater: Callable) -> None: ... +def untokenize(iterable) -> str: ... + +class StopTokenizing(Exception): + pass + +class TokenError(Exception): + pass + +class Untokenizer: + prev_col = ... # type: Union[int, List[Any]] + prev_row = ... # type: Union[int, List[Any]] + tokens = ... # type: List[Any] + def __init__(self) -> None: ... + def add_whitespace(self, start) -> None: ... + def compat(self, token: Union[Dict[Any, Any], List[Any], Tuple[Any, ...]], iterable: Iterator[Any]) -> None: ... + def untokenize(self, iterable) -> str: ...