Mirror of https://github.com/davidhalter/typeshed.git, synced 2025-12-30 08:04:24 +08:00
Add default values for third-party stubs beginning with 'P' (#9957)
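The pattern throughout this commit: stub defaults previously written with the `...` placeholder now spell out the actual runtime default, so type checkers and IDE tooltips can surface the real value. A minimal before/after illustration, using a fragment from the util.pyi hunk below:

# Before: the placeholder hides the runtime default from tooling.
def get_int_opt(options, optname, default: Incomplete | None = ...): ...

# After: the default is written out, matching the runtime signature.
def get_int_opt(options, optname, default: Incomplete | None = None): ...
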
@@ -12,8 +12,8 @@ def lex(code, lexer): ...
 @overload
 def format(tokens, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def format(tokens, formatter: Formatter[_T], outfile: None = ...) -> _T: ...
+def format(tokens, formatter: Formatter[_T], outfile: None = None) -> _T: ...
 @overload
 def highlight(code, lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def highlight(code, lexer, formatter: Formatter[_T], outfile: None = ...) -> _T: ...
+def highlight(code, lexer, formatter: Formatter[_T], outfile: None = None) -> _T: ...
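For readers unfamiliar with the overload pair above: omitting `outfile` selects the `None = None` overload and returns the formatted result directly, while passing a writable object selects the `SupportsWrite[_T]` overload and returns `None`. A minimal usage sketch (the output file name is hypothetical):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import PythonLexer

# No outfile: the `None = None` overload applies; the result type is the
# formatter's _T, which is str for a default HtmlFormatter.
html = highlight("print('hi')", PythonLexer(), HtmlFormatter())

# Writable outfile: the SupportsWrite[_T] overload applies; the call
# returns None and the output is written to the file instead.
with open("out.html", "w") as f:
    highlight("print('hi')", PythonLexer(), HtmlFormatter(), f)
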
@@ -4,8 +4,6 @@ from _typeshed import Incomplete
 def main_inner(parser, argns): ...

 class HelpFormatter(argparse.HelpFormatter):
-    def __init__(
-        self, prog, indent_increment: int = ..., max_help_position: int = ..., width: Incomplete | None = ...
-    ) -> None: ...
+    def __init__(self, prog, indent_increment: int = 2, max_help_position: int = 16, width: Incomplete | None = None) -> None: ...

 def main(args=...): ...
@@ -5,7 +5,7 @@ from typing import Any
 from pygments.lexer import Lexer
 from pygments.token import _TokenType

-def apply_filters(stream, filters, lexer: Incomplete | None = ...): ...
+def apply_filters(stream, filters, lexer: Incomplete | None = None): ...
 def simplefilter(f): ...

 class Filter:
@@ -13,10 +13,10 @@ class Formatter(Generic[_T]):
     encoding: Any
     options: Any
     @overload
-    def __init__(self: Formatter[str], *, encoding: None = ..., outencoding: None = ..., **options) -> None: ...
+    def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = ..., **options) -> None: ...
+    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = None, **options) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: None = ..., outencoding: str, **options) -> None: ...
-    def get_style_defs(self, arg: str = ...): ...
+    def __init__(self: Formatter[bytes], *, encoding: None = None, outencoding: str, **options) -> None: ...
+    def get_style_defs(self, arg: str = ""): ...
     def format(self, tokensource, outfile): ...
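The three `__init__` overloads key the formatter's type parameter off the encoding options: no encoding means `Formatter[str]`, while either `encoding` or `outencoding` means `Formatter[bytes]`. A sketch of how that plays out through `highlight` (this mirrors Pygments' runtime behavior, where setting an encoding makes the output bytes):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import PythonLexer

# encoding/outencoding omitted -> Formatter[str]; highlight() returns str.
text = highlight("1 + 1", PythonLexer(), HtmlFormatter())

# encoding given -> Formatter[bytes]; highlight() returns bytes.
data = highlight("1 + 1", PythonLexer(), HtmlFormatter(encoding="utf-8"))
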
@@ -21,5 +21,5 @@ from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalT

 def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ...
 def get_formatter_by_name(_alias, **options): ...
-def load_formatter_from_file(filename, formattername: str = ..., **options): ...
+def load_formatter_from_file(filename, formattername: str = "CustomFormatter", **options): ...
 def get_formatter_for_filename(fn, **options): ...
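The new `"CustomFormatter"` default mirrors the runtime signature: when no class name is given, Pygments looks for a class named `CustomFormatter` in the loaded file. A short sketch (file and class names hypothetical):

from pygments.formatters import load_formatter_from_file

# With the default formattername, the file must define `CustomFormatter`.
fmt = load_formatter_from_file("my_formatter.py")

# Or name the formatter class explicitly.
fmt = load_formatter_from_file("my_formatter.py", "MyFormatter")
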
@@ -33,9 +33,9 @@ class HtmlFormatter(Formatter[_T]):
     linespans: Any
     anchorlinenos: Any
     hl_lines: Any
-    def get_style_defs(self, arg: Incomplete | None = ...): ...
-    def get_token_style_defs(self, arg: Incomplete | None = ...): ...
-    def get_background_style_defs(self, arg: Incomplete | None = ...): ...
+    def get_style_defs(self, arg: Incomplete | None = None): ...
+    def get_token_style_defs(self, arg: Incomplete | None = None): ...
+    def get_background_style_defs(self, arg: Incomplete | None = None): ...
     def get_linenos_style_defs(self): ...
     def get_css_prefix(self, arg): ...
     def wrap(self, source): ...
@@ -12,7 +12,7 @@ class FontManager:
     font_size: Any
     fonts: Any
     encoding: Any
-    def __init__(self, font_name, font_size: int = ...) -> None: ...
+    def __init__(self, font_name, font_size: int = 14) -> None: ...
     def get_char_size(self): ...
     def get_text_size(self, text): ...
     def get_font(self, bold, oblique): ...
@@ -44,7 +44,7 @@ class ImageFormatter(Formatter[_T]):
     hl_lines: Any
     hl_color: Any
     drawables: Any
-    def get_style_defs(self, arg: str = ...) -> None: ...
+    def get_style_defs(self, arg: str = "") -> None: ...
     def format(self, tokensource, outfile) -> None: ...

 class GifImageFormatter(ImageFormatter[_T]):
@@ -23,7 +23,7 @@ class LatexFormatter(Formatter[_T]):
     left: Any
     right: Any
     envname: Any
-    def get_style_defs(self, arg: str = ...): ...
+    def get_style_defs(self, arg: str = ""): ...
     def format_unencoded(self, tokensource, outfile) -> None: ...

 class LatexEmbeddedLexer(Lexer):
@@ -13,11 +13,11 @@ class EscapeSequence:
     italic: Any
     def __init__(
         self,
-        fg: Incomplete | None = ...,
-        bg: Incomplete | None = ...,
-        bold: bool = ...,
-        underline: bool = ...,
-        italic: bool = ...,
+        fg: Incomplete | None = None,
+        bg: Incomplete | None = None,
+        bold: bool = False,
+        underline: bool = False,
+        italic: bool = False,
     ) -> None: ...
     def escape(self, attrs): ...
     def color_string(self): ...
@@ -26,7 +26,7 @@ class Lexer(metaclass=LexerMeta):
     filters: Any
     def __init__(self, **options) -> None: ...
     def add_filter(self, filter_, **options) -> None: ...
-    def get_tokens(self, text: str, unfiltered: bool = ...) -> Iterator[tuple[_TokenType, str]]: ...
+    def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...

 class DelegatingLexer(Lexer):
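`unfiltered` now shows its real default, `False`: by default `get_tokens` runs the lexer's registered filters over the token stream. A quick usage sketch:

from pygments.lexers import PythonLexer

lexer = PythonLexer()
# unfiltered defaults to False, so any registered filters are applied.
for token_type, value in lexer.get_tokens("x = 42\n"):
    print(token_type, repr(value))
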
@@ -47,9 +47,9 @@ class combined(tuple[Any, ...]):

 class _PseudoMatch:
     def __init__(self, start, text) -> None: ...
-    def start(self, arg: Incomplete | None = ...): ...
-    def end(self, arg: Incomplete | None = ...): ...
-    def group(self, arg: Incomplete | None = ...): ...
+    def start(self, arg: Incomplete | None = None): ...
+    def end(self, arg: Incomplete | None = None): ...
+    def group(self, arg: Incomplete | None = None): ...
     def groups(self): ...
     def groupdict(self): ...
@@ -69,32 +69,32 @@ class words(Future):
     words: Any
     prefix: Any
     suffix: Any
-    def __init__(self, words, prefix: str = ..., suffix: str = ...) -> None: ...
+    def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
     def get(self): ...

 class RegexLexerMeta(LexerMeta):
-    def process_tokendef(cls, name, tokendefs: Incomplete | None = ...): ...
+    def process_tokendef(cls, name, tokendefs: Incomplete | None = None): ...
     def get_tokendefs(cls): ...
     def __call__(cls, *args, **kwds): ...

 class RegexLexer(Lexer, metaclass=RegexLexerMeta):
     flags: Any
     tokens: Any
-    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...
+    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...

 class LexerContext:
     text: Any
     pos: Any
     end: Any
     stack: Any
-    def __init__(self, text, pos, stack: Incomplete | None = ..., end: Incomplete | None = ...) -> None: ...
+    def __init__(self, text, pos, stack: Incomplete | None = None, end: Incomplete | None = None) -> None: ...

 class ExtendedRegexLexer(RegexLexer):
     def get_tokens_unprocessed(  # type: ignore[override]
-        self, text: str | None = ..., context: LexerContext | None = ...
+        self, text: str | None = None, context: LexerContext | None = None
     ) -> Iterator[tuple[int, _TokenType, str]]: ...

 class ProfilingRegexLexerMeta(RegexLexerMeta): ...

 class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
-    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...
+    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
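Worth noting: the `stack` default is spelled as the tuple `("root",)` rather than a list, which keeps the default immutable while matching the runtime behavior of starting in the lexer's `root` state. A small sketch:

from pygments.lexers import PythonLexer

lexer = PythonLexer()
# stack defaults to ("root",): lexing starts in the "root" state and
# yields (index, token_type, value) triples.
for index, token_type, value in lexer.get_tokens_unprocessed("x = 1\n"):
    print(index, token_type, repr(value))
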
@@ -4,13 +4,13 @@ from typing import Any

 from pygments.lexer import Lexer, LexerMeta

-def get_all_lexers(plugins: bool = ...) -> Iterator[tuple[str, tuple[str, ...], tuple[str, ...], tuple[str, ...]]]: ...
+def get_all_lexers(plugins: bool = True) -> Iterator[tuple[str, tuple[str, ...], tuple[str, ...], tuple[str, ...]]]: ...
 def find_lexer_class(name: str) -> LexerMeta | None: ...
 def find_lexer_class_by_name(_alias: str) -> LexerMeta: ...
 def get_lexer_by_name(_alias: str, **options: Any) -> Lexer: ...
-def load_lexer_from_file(filename: FileDescriptorOrPath, lexername: str = ..., **options: Any) -> Lexer: ...
-def find_lexer_class_for_filename(_fn: StrPath, code: str | bytes | None = ...) -> LexerMeta | None: ...
-def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = ..., **options: Any) -> Lexer: ...
+def load_lexer_from_file(filename: FileDescriptorOrPath, lexername: str = "CustomLexer", **options: Any) -> Lexer: ...
+def find_lexer_class_for_filename(_fn: StrPath, code: str | bytes | None = None) -> LexerMeta | None: ...
+def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = None, **options: Any) -> Lexer: ...
 def get_lexer_for_mimetype(_mime: str, **options: Any) -> Lexer: ...
 def guess_lexer_for_filename(_fn: StrPath, _text: str, **options: Any) -> LexerMeta: ...
 def guess_lexer(_text: str | bytes, **options: Any) -> Lexer: ...
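With `code` defaulting to `None`, the filename-based lookups work without any source text; passing the code lets Pygments disambiguate extensions claimed by several lexers. A sketch (the file name is hypothetical):

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

lexer = get_lexer_by_name("python")
# code defaults to None; supplying it helps choose between lexers that
# register the same file extension (e.g. ".m").
lexer2 = get_lexer_for_filename("model.m", code="% a MATLAB comment")
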
@@ -1 +1 @@
-def get_filetype_from_buffer(buf, max_lines: int = ...): ...
+def get_filetype_from_buffer(buf, max_lines: int = 5): ...
@@ -5,4 +5,4 @@ FIRST_ELEMENT: Any

 def make_charset(letters): ...
 def regex_opt_inner(strings, open_paren): ...
-def regex_opt(strings, prefix: str = ..., suffix: str = ...): ...
+def regex_opt(strings, prefix: str = "", suffix: str = ""): ...
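`regex_opt` builds an optimized regex alternation out of a list of literal strings; `prefix` and `suffix` now show their empty-string defaults. A minimal sketch:

from pygments.regexopt import regex_opt

# prefix/suffix default to ""; here they wrap the alternation in word
# boundaries.
pattern = regex_opt(["if", "elif", "else"], prefix=r"\b", suffix=r"\b")
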
@@ -10,7 +10,7 @@ class Scanner:
     flags: Any
     last: Any
     match: Any
-    def __init__(self, text, flags: int = ...) -> None: ...
+    def __init__(self, text, flags: int = 0) -> None: ...
     @property
     def eos(self): ...
     def check(self, pattern): ...
@@ -10,10 +10,10 @@ xml_decl_re: Any
 class ClassNotFound(ValueError): ...
 class OptionError(Exception): ...

-def get_choice_opt(options, optname, allowed, default: Incomplete | None = ..., normcase: bool = ...): ...
-def get_bool_opt(options, optname, default: Incomplete | None = ...): ...
-def get_int_opt(options, optname, default: Incomplete | None = ...): ...
-def get_list_opt(options, optname, default: Incomplete | None = ...): ...
+def get_choice_opt(options, optname, allowed, default: Incomplete | None = None, normcase: bool = False): ...
+def get_bool_opt(options, optname, default: Incomplete | None = None): ...
+def get_int_opt(options, optname, default: Incomplete | None = None): ...
+def get_list_opt(options, optname, default: Incomplete | None = None): ...
 def docstring_headline(obj): ...
 def make_analysator(f): ...
 def shebang_matches(text, regex): ...
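These option helpers now advertise their `None`/`False` defaults, matching how formatters and lexers read values out of their `**options` dicts. A small usage sketch (the options dict is hypothetical):

from pygments.util import get_bool_opt, get_int_opt, get_list_opt

options = {"linenos": "true", "tabsize": "8"}
linenos = get_bool_opt(options, "linenos", False)  # -> True
tabsize = get_int_opt(options, "tabsize", 8)       # -> 8
filters = get_list_opt(options, "filters", [])     # -> []
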
@@ -21,8 +21,8 @@ def doctype_matches(text, regex): ...
 def html_doctype_matches(text): ...
 def looks_like_xml(text): ...
 def surrogatepair(c): ...
-def format_lines(var_name, seq, raw: bool = ..., indent_level: int = ...): ...
-def duplicates_removed(it, already_seen=...): ...
+def format_lines(var_name, seq, raw: bool = False, indent_level: int = 0): ...
+def duplicates_removed(it, already_seen=()): ...

 class Future:
     def get(self) -> None: ...