mirror of https://github.com/davidhalter/typeshed.git
synced 2025-12-28 06:36:54 +08:00
Fix tokenize.pyi; fix cp command.

.travis.yml
@@ -9,5 +9,5 @@ install:
 script:
 - mypy stdlib/3/
-- rm -rf 2_7; cp -r stdlib/2.7 2_7; rm 2_7/__builtin__.pyi; mypy --py2 2_7/
+- rm -rf 2_7; cp -R -L stdlib/2.7 2_7; rm 2_7/__builtin__.pyi ; mypy --py2 2_7/
 # That doesn't test everything, but it's a start.
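
The script fix swaps `cp -r` for `cp -R -L`: the -L flag dereferences symbolic links while copying, so the temporary 2_7 tree holds real files for mypy to check rather than symlinks that may dangle after the copy. A minimal Python sketch of what the fixed line does (paths taken from the script; this is an illustration, not part of the commit):

    import os
    import shutil

    # rm -rf 2_7; cp -R -L stdlib/2.7 2_7; rm 2_7/__builtin__.pyi
    # symlinks=False makes copytree copy the files that symlinks
    # point to, mirroring cp's -L flag.
    shutil.rmtree("2_7", ignore_errors=True)
    shutil.copytree("stdlib/2.7", "2_7", symlinks=False)
    os.remove("2_7/__builtin__.pyi")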

stdlib/2.7/tokenize.pyi
@@ -1,8 +1,8 @@
-# Automatically generated by pytype. May contain errors.
+# Automatically generated by pytype, manually fixed up. May still contain errors.
 
-from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union
+from typing import Any, Callable, Dict, Generator, Iterator, List, Tuple, Union, Iterable
 
-__all__ = ... # type: List[str, ...]
+__all__ = ... # type: List[str]
 __author__ = ... # type: str
 __credits__ = ... # type: str
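
The `__all__` fix is needed because `List` takes exactly one type parameter; only `Tuple` accepts the ellipsis form for a homogeneous, variable-length sequence. A quick illustration in the stub's type-comment style (not part of the commit):

    from typing import List, Tuple

    # List takes a single element type:
    names = ["a", "b"]  # type: List[str]
    # Tuple is fixed-length, with one type per position:
    pair = (1, "a")  # type: Tuple[int, str]
    # Only Tuple supports the ellipsis form, "any length, same type":
    nums = (1, 2, 3)  # type: Tuple[int, ...]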

@@ -41,10 +41,9 @@ GREATEREQUAL = ... # type: int
 Hexnumber = ... # type: str
 INDENT = ... # type: int
 
-# TODO: fill in type parameters of Callable
-ISEOF = ... # type: Callable
-ISNONTERMINAL = ... # type: Callable
-ISTERMINAL = ... # type: Callable
+def ISEOF(x: int) -> bool: ...
+def ISNONTERMINAL(x: int) -> bool: ...
+def ISTERMINAL(x: int) -> bool: ...
 
 Ignore = ... # type: str
 Imagnumber = ... # type: str
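
Replacing the bare `Callable` placeholders with real signatures matches what these helpers, re-exported from the `token` module, actually are: boolean predicates over token numbers. For example (illustrative, not part of the commit):

    import tokenize

    # Terminal token numbers sit below NT_OFFSET; ENDMARKER flags end of input.
    print(tokenize.ISTERMINAL(tokenize.NAME))      # True
    print(tokenize.ISNONTERMINAL(tokenize.NAME))   # False
    print(tokenize.ISEOF(tokenize.ENDMARKER))      # True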

@@ -116,14 +115,17 @@ tokenprog = ... # type: type
 triple_quoted = ... # type: Dict[str, str]
 x = ... # type: str
 
+_Pos = Tuple[int, int]
+_TokenType = Tuple[int, str, _Pos, _Pos, str]
+
 def any(*args, **kwargs) -> str: ...
-def generate_tokens(readline: Callable) -> Generator[Tuple[Any, ...], None, None]: ...
-def group(*args, **kwargs) -> str: ...
-def maybe(*args, **kwargs) -> str: ...
-def printtoken(type, token, srow_scol, erow_ecol, line) -> None: ...
-def tokenize(readline: Callable, *args, **kwargs) -> None: ...
-def tokenize_loop(readline: Callable, tokeneater: Callable) -> None: ...
-def untokenize(iterable) -> str: ...
+def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ...
+def group(*args: str) -> str: ...
+def maybe(*args: str) -> str: ...
+def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ...
+def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
+def untokenize(iterable: Iterable[_TokenType]) -> str: ...
 
 class StopTokenizing(Exception):
     pass
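
The new `_Pos` and `_TokenType` aliases encode the 5-tuples that Python 2's `generate_tokens` really yields: (type, string, (start row, start col), (end row, end col), source line). A short sketch under Python 2.7 (illustrative):

    from StringIO import StringIO
    import tokenize

    readline = StringIO("x = 1\n").readline
    for tok_type, tok_str, start, end, line in tokenize.generate_tokens(readline):
        # Each token has the shape Tuple[int, str, _Pos, _Pos, str].
        print("%d %r %s %s" % (tok_type, tok_str, start, end))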

@@ -132,10 +134,10 @@ class TokenError(Exception):
     pass
 
 class Untokenizer:
-    prev_col = ... # type: Union[int, List[Any, ...]]
-    prev_row = ... # type: Union[int, List[Any, ...]]
-    tokens = ... # type: List[Any, ...]
+    prev_col = ... # type: int
+    prev_row = ... # type: int
+    tokens = ... # type: List[str]
     def __init__(self) -> None: ...
-    def add_whitespace(self, start) -> None: ...
-    def compat(self, token: Union[Dict[Any, Any], List[Any], Tuple[Any, ...]], iterable: Iterator[Any]) -> None: ...
-    def untokenize(self, iterable) -> str: ...
+    def add_whitespace(self, _Pos) -> None: ...
+    def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ...
+    def untokenize(self, iterable: Iterable[_TokenType]) -> str: ...
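
`Untokenizer.untokenize` (and the module-level `untokenize`) consumes those same 5-tuples, using the recorded positions to restore whitespace, which is why `Iterable[_TokenType]` is the right parameter type. A round-trip sketch under Python 2.7 (illustrative):

    from StringIO import StringIO
    import tokenize

    source = "x = 1 + 2\n"
    tokens = list(tokenize.generate_tokens(StringIO(source).readline))
    # With full 5-tuples, the column information lets untokenize
    # reproduce the original text exactly for simple input like this.
    assert tokenize.untokenize(tokens) == source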