Mirror of https://github.com/davidhalter/typeshed.git, synced 2026-01-02 17:43:25 +08:00
Complete the tokenize module type hints (#984)
* Complete the tokenize module type hints
* Add missing import for Optional
* Use a 3.5-style named tuple; untokenize speaks with forked tongue, so use Any
* Use explicit types for fields
committed by Łukasz Langa
parent 48b1962951
commit 3f0eb995aa
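The "3.5-style named tuple" in the commit message is the functional NamedTuple form; the class-based annotation syntax only arrived in Python 3.6, and subclassing the generated tuple type is what lets the stub attach the exact_type property. A minimal sketch (not part of the commit, runnable against the stdlib) of why tokens are additionally typed as plain sequences:

import io
import tokenize

# TokenInfo is a named tuple, so field access and index access agree;
# untokenize() also accepts shorter sequences such as (type, string) pairs,
# hence _Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]].
toks = list(tokenize.tokenize(io.BytesIO(b"x = 1\n").readline))
tok = toks[1]              # first token after the leading ENCODING token
assert tok.type == tok[0]
assert tok.string == tok[1]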
@@ -2,34 +2,47 @@
 #
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
-from typing import Any, Union, TextIO
+from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple
+from builtins import open as _builtin_open
 from token import *  # noqa: F403
 
-COMMENT = ...  # type: Any
-NL = ...  # type: Any
-ENCODING = ...  # type: Any
+COMMENT = ...  # type: int
+NL = ...  # type: int
+ENCODING = ...  # type: int
+
+_Position = Tuple[int, int]
+
+_TokenInfo = NamedTuple('TokenInfo', [
+    ('type', int),
+    ('string', str),
+    ('start', _Position),
+    ('end', _Position),
+    ('line', str)
+])
 
-class TokenInfo:
+class TokenInfo(_TokenInfo):
     @property
-    def exact_type(self): ...
+    def exact_type(self) -> int: ...
+
+# Backwards compatible tokens can be sequences of a shorter length too
+_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
 
 class TokenError(Exception): ...
-
 class StopTokenizing(Exception): ...
 
 class Untokenizer:
-    tokens = ...  # type: Any
-    prev_row = ...  # type: Any
-    prev_col = ...  # type: Any
-    encoding = ...  # type: Any
-    def __init__(self): ...
-    def add_whitespace(self, start): ...
-    def untokenize(self, iterable): ...
-    def compat(self, token, iterable): ...
+    tokens = ...  # type: List[str]
+    prev_row = ...  # type: int
+    prev_col = ...  # type: int
+    encoding = ...  # type: Optional[str]
+    def __init__(self) -> None: ...
+    def add_whitespace(self, start: _Position) -> None: ...
+    def untokenize(self, iterable: Iterable[_Token]) -> str: ...
+    def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...
 
-def untokenize(iterable): ...
-def detect_encoding(readline): ...
-def tokenize(readline): ...
+def untokenize(iterable: Iterable[_Token]) -> Any: ...
+def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
+def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
 
 
 def open(filename: Union[str, bytes, int]) -> TextIO: ...
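A short usage sketch of the signatures typed above: detect_encoding() and tokenize() both take a zero-argument readline callable returning bytes, and tokenize() yields TokenInfo values. The module also defines its own open() (which is why the _builtin_open alias is imported above); it opens a source file read-only in text mode using the detected encoding.

import io
import tokenize

data = b"# -*- coding: utf-8 -*-\nx = 1\n"

# detect_encoding() returns the encoding name plus the lines it read to find it.
encoding, read_lines = tokenize.detect_encoding(io.BytesIO(data).readline)
assert encoding == "utf-8"

# tokenize() yields TokenInfo named tuples, starting with an ENCODING token.
for tok in tokenize.tokenize(io.BytesIO(data).readline):
    print(tok.exact_type, tok.string, tok.start, tok.end)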
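"Untokenize speaks with forked tongue" refers to its two return types, which is why the stub annotates it as returning Any: when the stream carries an ENCODING token (as streams produced by tokenize() always do), the result is encoded to bytes, while a stream of bare (type, string) pairs without one comes back as str. A hedged illustration:

import io
import tokenize

toks = list(tokenize.tokenize(io.BytesIO(b"x = 1\n").readline))

# With the leading ENCODING token present, the result is bytes.
assert isinstance(tokenize.untokenize(toks), bytes)

# Two-element tokens with the ENCODING token dropped come back as str.
pairs = [(t.type, t.string) for t in toks[1:]]
assert isinstance(tokenize.untokenize(pairs), str)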