Create pygments stubs (#5759)

* run stubgen

* replace individual lexers with __getattr__ in lexers/__init__.pyi

* replace individual styles with __getattr__ in styles/__init__.pyi

* import-export each formatter into formatters/__init__.pyi

* clean up slightly with quick and dirty script

* manual fixing
Author: Akuli
Date: 2021-07-11 16:27:35 +03:00
Committed by: GitHub
Parent: a90573feb2
Commit: e676fcabd3
35 changed files with 776 additions and 0 deletions

@@ -39,6 +39,7 @@
"stubs/Pillow",
"stubs/paramiko",
"stubs/protobuf",
"stubs/Pygments",
"stubs/PyMySQL",
"stubs/python-dateutil",
"stubs/pyvmomi",

@@ -0,0 +1 @@
sphinx

@@ -0,0 +1,12 @@
# Pygments uses mcs, pyright wants cls
pygments.lexer.LexerMeta.__new__
pygments.style.StyleMeta.__new__
# Defined in lexer classes; intended to be used as a static method, but doesn't use @staticmethod
pygments.lexer.LexerMeta.analyse_text
# Inheriting from tuple seems to do something weird
pygments.token.Number
pygments.token.String
pygments.token.Token
pygments.token.Whitespace
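
For context, the first two allowlist entries exist because stubtest compares parameter names: Pygments spells the implicit metaclass argument `mcs`, while stubgen and pyright use the conventional `cls`. A minimal sketch of the mismatch (the `Meta` class here is hypothetical):

```python
class Meta(type):
    # Pygments names the implicit metaclass argument "mcs"...
    def __new__(mcs, name, bases, namespace):
        return super().__new__(mcs, name, bases, namespace)

# ...while the stubs (and pyright) expect the conventional spelling:
# class Meta(type):
#     def __new__(cls, name, bases, namespace): ...
```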

@@ -0,0 +1,2 @@
version = "2.9"
requires = ["types-docutils"]

@@ -0,0 +1,5 @@
from typing import Any, Optional
def lex(code, lexer): ...
def format(tokens, formatter, outfile: Optional[Any] = ...): ...
def highlight(code, lexer, formatter, outfile: Optional[Any] = ...): ...
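
These are the three top-level entry points. A typical call chains a lexer and a formatter through `highlight`; a minimal usage sketch (the lexer and formatter classes resolve through the stubs below):

```python
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# Tokenize Python source and render it as HTML in one call.
html = highlight("print('hi')", PythonLexer(), HtmlFormatter())
```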

@@ -0,0 +1,9 @@
import argparse
from typing import Any, Optional
def main_inner(parser, argns): ...
class HelpFormatter(argparse.HelpFormatter):
def __init__(self, prog, indent_increment: int = ..., max_help_position: int = ..., width: Optional[Any] = ...) -> None: ...
def main(args=...): ...

@@ -0,0 +1,10 @@
from typing import Any
esc: str
codes: Any
dark_colors: Any
light_colors: Any
def reset_color(): ...
def colorize(color_key, text): ...
def ansiformat(attr, text): ...

@@ -0,0 +1,18 @@
from collections.abc import Iterable, Iterator
from typing import Any, Optional
from pygments.lexer import Lexer
from pygments.token import _TokenType
def apply_filters(stream, filters, lexer: Optional[Any] = ...): ...
def simplefilter(f): ...
class Filter:
options: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class FunctionFilter(Filter):
function: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
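
`Filter.filter` is the method user code overrides: it consumes and yields `(token_type, value)` pairs, which is what the annotated signature above captures. A minimal sketch of a custom filter (the `DropComments` class is hypothetical):

```python
from pygments.filter import Filter
from pygments.token import Comment

class DropComments(Filter):
    # Drop every comment token from the stream.
    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype not in Comment:  # "in" tests token-subtype membership
                yield ttype, value
```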

@@ -0,0 +1,58 @@
from collections.abc import Iterable, Iterator
from typing import Any
from pygments.filter import Filter
from pygments.lexer import Lexer
from pygments.token import _TokenType
def find_filter_class(filtername): ...
def get_filter_by_name(filtername, **options): ...
def get_all_filters(): ...
class CodeTagFilter(Filter):
tag_re: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class SymbolFilter(Filter):
latex_symbols: Any
isabelle_symbols: Any
lang_map: Any
symbols: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class KeywordCaseFilter(Filter):
convert: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class NameHighlightFilter(Filter):
names: Any
tokentype: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class ErrorToken(Exception): ...
class RaiseOnErrorTokenFilter(Filter):
exception: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class VisibleWhitespaceFilter(Filter):
wstt: Any
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class GobbleFilter(Filter):
n: Any
def __init__(self, **options) -> None: ...
def gobble(self, value, left): ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class TokenMergeFilter(Filter):
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
FILTERS: Any
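
Built-in filters like these are usually attached by their registered name rather than instantiated directly; keyword options are forwarded to the filter's `__init__`. For example:

```python
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name("python")
# Attaches a KeywordCaseFilter; options go to its __init__.
lexer.add_filter("keywordcase", case="upper")
```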

@@ -0,0 +1,15 @@
from typing import Any
class Formatter:
name: Any
aliases: Any
filenames: Any
unicodeoutput: bool
style: Any
full: Any
title: Any
encoding: Any
options: Any
def __init__(self, **options) -> None: ...
def get_style_defs(self, arg: str = ...): ...
def format(self, tokensource, outfile): ...
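
Concrete formatters subclass `Formatter` and implement `format` (or `format_unencoded`, as most of the stubs below do). A minimal sketch with a hypothetical class name:

```python
from pygments.formatter import Formatter

class PlainTextFormatter(Formatter):
    # Emit only the token values, discarding all styling.
    def format(self, tokensource, outfile):
        for ttype, value in tokensource:
            outfile.write(value)
```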

@@ -0,0 +1,21 @@
from collections.abc import Iterator
from typing import Any
from .bbcode import BBCodeFormatter as BBCodeFormatter
from .html import HtmlFormatter as HtmlFormatter
from .img import (
BmpImageFormatter as BmpImageFormatter,
GifImageFormatter as GifImageFormatter,
ImageFormatter as ImageFormatter,
JpgImageFormatter as JpgImageFormatter,
)
from .irc import IRCFormatter as IRCFormatter
from .latex import LatexFormatter as LatexFormatter
from .other import NullFormatter as NullFormatter, RawTokenFormatter as RawTokenFormatter, TestcaseFormatter as TestcaseFormatter
from .pangomarkup import PangoMarkupFormatter as PangoMarkupFormatter
from .rtf import RtfFormatter as RtfFormatter
from .svg import SvgFormatter as SvgFormatter
from .terminal import TerminalFormatter as TerminalFormatter
from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter
def get_all_formatters() -> Iterator[Any]: ...
def get_formatter_by_name(_alias, **options): ...
def load_formatter_from_file(filename, formattername: str = ..., **options): ...
def get_formatter_for_filename(fn, **options): ...
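
As with lexers, formatters are usually obtained through the lookup helpers; options such as `linenos` are passed through to the formatter:

```python
from pygments.formatters import get_formatter_by_name, get_formatter_for_filename

fmt = get_formatter_by_name("html", linenos=True)
fmt = get_formatter_for_filename("out.tex")  # resolves to LatexFormatter
```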

@@ -0,0 +1,3 @@
from typing import Any
FORMATTERS: Any

@@ -0,0 +1,11 @@
from typing import Any
from pygments.formatter import Formatter
class BBCodeFormatter(Formatter):
name: str
aliases: Any
filenames: Any
styles: Any
def __init__(self, **options) -> None: ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,40 @@
from typing import Any, Optional
from pygments.formatter import Formatter
class HtmlFormatter(Formatter):
name: str
aliases: Any
filenames: Any
title: Any
nowrap: Any
noclasses: Any
classprefix: Any
cssclass: Any
cssstyles: Any
prestyles: Any
cssfile: Any
noclobber_cssfile: Any
tagsfile: Any
tagurlformat: Any
filename: Any
wrapcode: Any
span_element_openers: Any
linenos: int
linenostart: Any
linenostep: Any
linenospecial: Any
nobackground: Any
lineseparator: Any
lineanchors: Any
linespans: Any
anchorlinenos: Any
hl_lines: Any
def __init__(self, **options) -> None: ...
def get_style_defs(self, arg: Optional[Any] = ...): ...
def get_token_style_defs(self, arg: Optional[Any] = ...): ...
def get_background_style_defs(self, arg: Optional[Any] = ...): ...
def get_linenos_style_defs(self): ...
def get_css_prefix(self, arg): ...
def wrap(self, source, outfile): ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,65 @@
from typing import Any
from pygments.formatter import Formatter
class PilNotAvailable(ImportError): ...
class FontNotFound(Exception): ...
class FontManager:
font_name: Any
font_size: Any
fonts: Any
encoding: Any
def __init__(self, font_name, font_size: int = ...) -> None: ...
def get_char_size(self): ...
def get_text_size(self, text): ...
def get_font(self, bold, oblique): ...
class ImageFormatter(Formatter):
name: str
aliases: Any
filenames: Any
unicodeoutput: bool
default_image_format: str
encoding: str
styles: Any
background_color: str
image_format: Any
image_pad: Any
line_pad: Any
fonts: Any
line_number_fg: Any
line_number_bg: Any
line_number_chars: Any
line_number_bold: Any
line_number_italic: Any
line_number_pad: Any
line_numbers: Any
line_number_separator: Any
line_number_step: Any
line_number_start: Any
line_number_width: Any
hl_lines: Any
hl_color: Any
drawables: Any
def __init__(self, **options) -> None: ...
def get_style_defs(self, arg: str = ...) -> None: ...
def format(self, tokensource, outfile) -> None: ...
class GifImageFormatter(ImageFormatter):
name: str
aliases: Any
filenames: Any
default_image_format: str
class JpgImageFormatter(ImageFormatter):
name: str
aliases: Any
filenames: Any
default_image_format: str
class BmpImageFormatter(ImageFormatter):
name: str
aliases: Any
filenames: Any
default_image_format: str

@@ -0,0 +1,13 @@
from typing import Any
from pygments.formatter import Formatter
class IRCFormatter(Formatter):
name: str
aliases: Any
filenames: Any
darkbg: Any
colorscheme: Any
linenos: Any
def __init__(self, **options) -> None: ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,33 @@
from typing import Any
from pygments.formatter import Formatter
from pygments.lexer import Lexer
class LatexFormatter(Formatter):
name: str
aliases: Any
filenames: Any
docclass: Any
preamble: Any
linenos: Any
linenostart: Any
linenostep: Any
verboptions: Any
nobackground: Any
commandprefix: Any
texcomments: Any
mathescape: Any
escapeinside: Any
left: Any
right: Any
envname: Any
def __init__(self, **options) -> None: ...
def get_style_defs(self, arg: str = ...): ...
def format_unencoded(self, tokensource, outfile) -> None: ...
class LatexEmbeddedLexer(Lexer):
left: Any
right: Any
lang: Any
def __init__(self, left, right, lang, **options) -> None: ...
def get_tokens_unprocessed(self, text): ...

@@ -0,0 +1,26 @@
from typing import Any
from pygments.formatter import Formatter
class NullFormatter(Formatter):
name: str
aliases: Any
filenames: Any
def format(self, tokensource, outfile) -> None: ...
class RawTokenFormatter(Formatter):
name: str
aliases: Any
filenames: Any
unicodeoutput: bool
encoding: str
compress: Any
error_color: Any
def __init__(self, **options) -> None: ...
def format(self, tokensource, outfile) -> None: ...
class TestcaseFormatter(Formatter):
name: str
aliases: Any
def __init__(self, **options) -> None: ...
def format(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,11 @@
from typing import Any
from pygments.formatter import Formatter
class PangoMarkupFormatter(Formatter):
name: str
aliases: Any
filenames: Any
styles: Any
def __init__(self, **options) -> None: ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,12 @@
from typing import Any
from pygments.formatter import Formatter
class RtfFormatter(Formatter):
name: str
aliases: Any
filenames: Any
fontface: Any
fontsize: Any
def __init__(self, **options) -> None: ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,21 @@
from typing import Any
from pygments.formatter import Formatter
class SvgFormatter(Formatter):
name: str
aliases: Any
filenames: Any
nowrap: Any
fontfamily: Any
fontsize: Any
xoffset: Any
yoffset: Any
ystep: Any
spacehack: Any
linenos: Any
linenostart: Any
linenostep: Any
linenowidth: Any
def __init__(self, **options) -> None: ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,14 @@
from typing import Any
from pygments.formatter import Formatter
class TerminalFormatter(Formatter):
name: str
aliases: Any
filenames: Any
darkbg: Any
colorscheme: Any
linenos: Any
def __init__(self, **options) -> None: ...
def format(self, tokensource, outfile): ...
def format_unencoded(self, tokensource, outfile) -> None: ...

@@ -0,0 +1,37 @@
from typing import Any, Optional
from pygments.formatter import Formatter
class EscapeSequence:
fg: Any
bg: Any
bold: Any
underline: Any
italic: Any
def __init__(
self, fg: Optional[Any] = ..., bg: Optional[Any] = ..., bold: bool = ..., underline: bool = ..., italic: bool = ...
) -> None: ...
def escape(self, attrs): ...
def color_string(self): ...
def true_color_string(self): ...
def reset_string(self): ...
class Terminal256Formatter(Formatter):
name: str
aliases: Any
filenames: Any
xterm_colors: Any
best_match: Any
style_string: Any
usebold: Any
useunderline: Any
useitalic: Any
linenos: Any
def __init__(self, **options) -> None: ...
def format(self, tokensource, outfile): ...
def format_unencoded(self, tokensource, outfile) -> None: ...
class TerminalTrueColorFormatter(Terminal256Formatter):
name: str
aliases: Any
filenames: Any

@@ -0,0 +1,97 @@
from collections.abc import Iterable, Iterator, Sequence
from typing import Any, Optional, Tuple
from pygments.token import _TokenType
from pygments.util import Future
class LexerMeta(type):
def __new__(cls, name, bases, d): ...
def analyse_text(self, text) -> None: ... # actually defined in class Lexer
# ClassVars of Lexer, but same situation as with StyleMeta and Style
name: str
aliases: Sequence[str] # not intended to be mutable
filenames: Sequence[str]
alias_filenames: Sequence[str]
mimetypes: Sequence[str]
priority: int
class Lexer(metaclass=LexerMeta):
options: Any
stripnl: Any
stripall: Any
ensurenl: Any
tabsize: Any
encoding: Any
filters: Any
def __init__(self, **options) -> None: ...
def add_filter(self, filter_, **options) -> None: ...
def get_tokens(self, text: str, unfiltered: bool = ...) -> Iterator[tuple[_TokenType, str]]: ...
def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
class DelegatingLexer(Lexer):
root_lexer: Any
language_lexer: Any
needle: Any
def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
class include(str): ...
class _inherit: ...
inherit: Any
class combined(Tuple[Any, ...]):
def __new__(cls, *args): ...
def __init__(self, *args) -> None: ...
class _PseudoMatch:
def __init__(self, start, text) -> None: ...
def start(self, arg: Optional[Any] = ...): ...
def end(self, arg: Optional[Any] = ...): ...
def group(self, arg: Optional[Any] = ...): ...
def groups(self): ...
def groupdict(self): ...
def bygroups(*args): ...
class _This: ...
this: Any
def using(_other, **kwargs): ...
class default:
state: Any
def __init__(self, state) -> None: ...
class words(Future):
words: Any
prefix: Any
suffix: Any
def __init__(self, words, prefix: str = ..., suffix: str = ...) -> None: ...
def get(self): ...
class RegexLexerMeta(LexerMeta):
def process_tokendef(cls, name, tokendefs: Optional[Any] = ...): ...
def get_tokendefs(cls): ...
def __call__(cls, *args, **kwds): ...
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
flags: Any
tokens: Any
def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...
class LexerContext:
text: Any
pos: Any
end: Any
stack: Any
def __init__(self, text, pos, stack: Optional[Any] = ..., end: Optional[Any] = ...) -> None: ...
class ExtendedRegexLexer(RegexLexer):
def get_tokens_unprocessed(self, text: str | None = ..., context: LexerContext | None = ...) -> Iterator[tuple[int, _TokenType, str]]: ... # type: ignore
class ProfilingRegexLexerMeta(RegexLexerMeta): ...
class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...
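
The `tokens` table on `RegexLexer` is the main extension point: each state maps to a list of `(regex, token_type)` rules, optionally with a new-state action. A minimal sketch of a custom lexer (the `ToyLexer` class and its toy language are hypothetical):

```python
from pygments.lexer import RegexLexer
from pygments.token import Keyword, Name, Text

class ToyLexer(RegexLexer):
    name = "Toy"
    aliases = ["toy"]
    filenames = ["*.toy"]
    tokens = {
        "root": [
            (r"\blet\b", Keyword),    # keywords
            (r"[A-Za-z_]\w*", Name),  # identifiers
            (r"\s+", Text),           # whitespace
        ],
    }
```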

@@ -0,0 +1,22 @@
from _typeshed import StrOrBytesPath, StrPath
from collections.abc import Iterator
from typing import Any, Tuple, Union
from pygments.lexer import Lexer, LexerMeta
_OpenFile = Union[StrOrBytesPath, int] # copy/pasted from builtins.pyi
# TODO: use lower-case tuple once mypy updated
def get_all_lexers() -> Iterator[tuple[str, Tuple[str, ...], Tuple[str, ...], Tuple[str, ...]]]: ...
def find_lexer_class(name: str) -> LexerMeta | None: ...
def find_lexer_class_by_name(_alias: str) -> LexerMeta: ...
def get_lexer_by_name(_alias: str, **options: Any) -> Lexer: ...
def load_lexer_from_file(filename: _OpenFile, lexername: str = ..., **options: Any) -> Lexer: ...
def find_lexer_class_for_filename(_fn: StrPath, code: str | bytes | None = ...) -> LexerMeta | None: ...
def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = ..., **options: Any) -> Lexer: ...
def get_lexer_for_mimetype(_mime: str, **options: Any) -> Lexer: ...
def guess_lexer_for_filename(_fn: StrPath, _text: str, **options: Any) -> LexerMeta | None: ...
def guess_lexer(_text: str | bytes, **options: Any) -> Lexer: ...
# Having every lexer class here doesn't seem to be worth it
def __getattr__(name: str) -> Any: ...
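
The `__getattr__` fallback is what lets `from pygments.lexers import PythonLexer` type-check without stubbing every lexer class individually. Typical lookups go through the helpers instead:

```python
from pygments.lexers import get_lexer_by_name, guess_lexer

lexer = get_lexer_by_name("python", stripall=True)
lexer = guess_lexer("#!/usr/bin/env python\nprint(42)\n")
```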

@@ -0,0 +1 @@
def get_filetype_from_buffer(buf, max_lines: int = ...): ...

@@ -0,0 +1,10 @@
from collections.abc import Iterator
from typing import Any
LEXER_ENTRY_POINT: str
FORMATTER_ENTRY_POINT: str
STYLE_ENTRY_POINT: str
FILTER_ENTRY_POINT: str
def iter_entry_points(group_name): ...
def find_plugin_lexers() -> Iterator[Any]: ...
def find_plugin_formatters() -> Iterator[Any]: ...
def find_plugin_styles() -> Iterator[Any]: ...
def find_plugin_filters() -> Iterator[Any]: ...

@@ -0,0 +1,8 @@
from typing import Any
CS_ESCAPE: Any
FIRST_ELEMENT: Any
def make_charset(letters): ...
def regex_opt_inner(strings, open_paren): ...
def regex_opt(strings, prefix: str = ..., suffix: str = ...): ...

@@ -0,0 +1,19 @@
from typing import Any
class EndOfText(RuntimeError): ...
class Scanner:
data: Any
data_length: Any
start_pos: int
pos: int
flags: Any
last: Any
match: Any
def __init__(self, text, flags: int = ...) -> None: ...
@property
def eos(self): ...
def check(self, pattern): ...
def test(self, pattern): ...
def scan(self, pattern): ...
def get_char(self) -> None: ...

@@ -0,0 +1,22 @@
from typing import Any
from docutils.parsers.rst import Directive
MODULEDOC: str
LEXERDOC: str
FMTERDOC: str
FILTERDOC: str
class PygmentsDoc(Directive):
has_content: bool
required_arguments: int
optional_arguments: int
final_argument_whitespace: bool
option_spec: Any
filenames: Any
def run(self): ...
def document_lexers(self): ...
def document_formatters(self): ...
def document_filters(self): ...
def setup(app) -> None: ...

@@ -0,0 +1,40 @@
from collections.abc import Iterator, Mapping, Set
from typing_extensions import TypedDict
from pygments.token import _TokenType
ansicolors: Set[str] # not intended to be mutable
class _StyleDict(TypedDict):
color: str | None
bold: bool
italic: bool
underline: bool
bgcolor: str | None
border: str | None
roman: bool | None # yes, this really can be True, False, or None
sans: bool | None
mono: bool | None
ansicolor: str | None
bgansicolor: str | None
class StyleMeta(type):
def __new__(cls, name, bases, dct): ...
def style_for_token(cls, token: _TokenType) -> _StyleDict: ...
def styles_token(cls, ttype: _TokenType) -> bool: ...
def list_styles(cls) -> list[tuple[_TokenType, _StyleDict]]: ...
def __iter__(cls) -> Iterator[tuple[_TokenType, _StyleDict]]: ...
def __len__(cls) -> int: ...
# These are a bit tricky: technically they should be ClassVars in class Style,
# but then we couldn't use StyleMeta to denote a style class. We need that
# because Type[Style] is not iterable, for example.
background_color: str
highlight_color: str
line_number_color: str
line_number_background_color: str
line_number_special_color: str
line_number_special_background_color: str
styles: Mapping[_TokenType, str] # not intended to be mutable
class Style(metaclass=StyleMeta): ...
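
A style class is never instantiated; its class-level attributes are read through `StyleMeta`, which is why the stub declares them on the metaclass. A minimal sketch of a user-defined style (the `MyStyle` class and its colors are hypothetical):

```python
from pygments.style import Style
from pygments.token import Comment, Keyword

class MyStyle(Style):
    background_color = "#f8f8f8"
    styles = {
        Comment: "italic #888888",
        Keyword: "bold #004488",
    }
```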

@@ -0,0 +1,13 @@
from collections.abc import Iterator, Mapping
from typing import Any
from pygments.style import StyleMeta
from pygments.util import ClassNotFound as ClassNotFound
STYLE_MAP: Mapping[str, str]
def get_style_by_name(name) -> StyleMeta: ...
def get_all_styles() -> Iterator[str]: ...
# Having every style class here doesn't seem to be worth it
def __getattr__(name: str) -> Any: ...
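
Note that `get_style_by_name` returns the style class itself (hence the `StyleMeta` return type), not an instance:

```python
from pygments.styles import get_all_styles, get_style_by_name

style_cls = get_style_by_name("default")  # the class itself, not an instance
print(sorted(get_all_styles()))           # all registered style names
```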

@@ -0,0 +1,34 @@
from collections.abc import Mapping
from typing import Tuple
class _TokenType(Tuple[str, ...]): # TODO: change to lower-case tuple once new mypy released
parent: _TokenType | None
def split(self) -> list[_TokenType]: ...
subtypes: set[_TokenType]
def __init__(self, *args: str) -> None: ...
def __contains__(self, val: _TokenType) -> bool: ... # type: ignore
def __getattr__(self, name: str) -> _TokenType: ...
def __copy__(self): ...
def __deepcopy__(self, memo): ...
Token: _TokenType
Text: _TokenType
Whitespace: _TokenType
Escape: _TokenType
Error: _TokenType
Other: _TokenType
Keyword: _TokenType
Name: _TokenType
Literal: _TokenType
String: _TokenType
Number: _TokenType
Punctuation: _TokenType
Operator: _TokenType
Comment: _TokenType
Generic: _TokenType
def is_token_subtype(ttype, other): ...
def string_to_tokentype(s): ...
# Dict, but shouldn't be mutated
STANDARD_TYPES: Mapping[_TokenType, str]
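
Token types are singleton tuples built up by attribute access, and containment means "is a subtype of" — which is what the `__getattr__` and `__contains__` stubs above encode:

```python
from pygments.token import Name, Token, is_token_subtype

func = Name.Function                  # subtypes are created by attribute access
assert func in Name                   # __contains__ means "is a subtype of"
assert is_token_subtype(func, Token)  # every token type descends from Token
```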

@@ -0,0 +1,38 @@
from typing import Any
Cc: str
Cf: str
Cn: str
Co: str
Cs: str
Ll: str
Lm: str
Lo: str
Lt: str
Lu: str
Mc: str
Me: str
Mn: str
Nd: str
Nl: str
No: str
Pc: str
Pd: str
Pe: str
Pf: str
Pi: str
Po: str
Ps: str
Sc: str
Sk: str
Sm: str
So: str
Zl: str
Zp: str
Zs: str
xid_continue: str
xid_start: str
cats: Any
def combine(*args): ...
def allexcept(*args): ...

@@ -0,0 +1,34 @@
from io import TextIOWrapper
from typing import Any, Optional
split_path_re: Any
doctype_lookup_re: Any
tag_re: Any
xml_decl_re: Any
class ClassNotFound(ValueError): ...
class OptionError(Exception): ...
def get_choice_opt(options, optname, allowed, default: Optional[Any] = ..., normcase: bool = ...): ...
def get_bool_opt(options, optname, default: Optional[Any] = ...): ...
def get_int_opt(options, optname, default: Optional[Any] = ...): ...
def get_list_opt(options, optname, default: Optional[Any] = ...): ...
def docstring_headline(obj): ...
def make_analysator(f): ...
def shebang_matches(text, regex): ...
def doctype_matches(text, regex): ...
def html_doctype_matches(text): ...
def looks_like_xml(text): ...
def surrogatepair(c): ...
def format_lines(var_name, seq, raw: bool = ..., indent_level: int = ...): ...
def duplicates_removed(it, already_seen=...): ...
class Future:
def get(self) -> None: ...
def guess_decode(text): ...
def guess_decode_from_terminal(text, term): ...
def terminal_encoding(term): ...
class UnclosingTextIOWrapper(TextIOWrapper):
def close(self) -> None: ...