Mirror of https://github.com/davidhalter/typeshed.git, synced 2025-12-15 08:17:07 +08:00.
Use a more precise return type for tokenize.untokenize() (#13366)
This commit is contained in:
@@ -130,9 +130,8 @@ class Untokenizer:
|
||||
if sys.version_info >= (3, 12):
|
||||
# NOTE(review): stub for Untokenizer.escape_brackets, new in Python 3.12 —
# presumably escapes `{`/`}` when re-emitting f-string parts; confirm against CPython tokenize.
def escape_brackets(self, token: str) -> str: ...
|
||||
|
||||
# The CPython docstring says "returns bytes" but is incorrect:
# untokenize() returns str, unless an ENCODING token is present in the
# input, in which case the result is encoded and bytes is returned —
# hence `str | Any` rather than plain `Any` (the old, less precise annotation).
def untokenize(iterable: Iterable[_Token]) -> str | Any: ...
|
||||
# Returns the detected source encoding name and the raw line(s) consumed
# while detecting it (cookie/BOM inspection per the tokenize module).
def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ...
|
||||
# Tokenizes a *bytes* source; `readline` must yield raw bytes lines
# (encoding is detected from the stream itself).
def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ...
|
||||
# str-input counterpart of tokenize(): `readline` yields already-decoded text lines.
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
|
||||
|
||||
Reference in New Issue
Block a user