[Pygments] Complete stubs for filter and filters modules (#15616)

This commit is contained in:
Brian Schubert
2026-04-06 21:47:39 -04:00
committed by GitHub
parent d3235e3d7b
commit 4f08613245
2 changed files with 81 additions and 42 deletions
+20 -9
View File
@@ -1,18 +1,29 @@
from _typeshed import Incomplete
from collections.abc import Iterable, Iterator
from typing import Any, ClassVar, Protocol, type_check_only
from pygments.lexer import Lexer
from pygments.token import _TokenType
def apply_filters(stream, filters, lexer=None): ...
def simplefilter(f): ...
@type_check_only
class _SimpleFilterFunction(Protocol):
    # Function that can be looked up as a method on a FunctionFilter subclass.
def __call__(
self, self_: FunctionFilter, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]], options: dict[str, Any], /
) -> Iterator[tuple[_TokenType, str]]: ...
def apply_filters(
stream: Iterable[tuple[_TokenType, str]], filters: Iterable[Filter], lexer: Lexer | None = None
) -> Iterator[tuple[_TokenType, str]]: ...
def simplefilter(f: _SimpleFilterFunction) -> type[FunctionFilter]: ...
class Filter:
options: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
options: dict[str, Any] # Arbitrary values used by subclasses.
def __init__(self, **options: Any) -> None: ... # ditto.
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class FunctionFilter(Filter):
function: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
# Set to None in class, but overridden with a non-None value in the subclasses created by @simplefilter.
function: ClassVar[_SimpleFilterFunction]
    # 'options' gets passed as a dict to 'function'; valid types depend on the wrapped function's signature.
def __init__(self, **options: Any) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
+61 -33
View File
@@ -1,58 +1,86 @@
from _typeshed import Incomplete
from collections.abc import Generator, Iterable, Iterator
from _typeshed import ConvertibleToInt
from collections.abc import Callable, Generator, Iterable, Iterator
from re import Pattern
from typing import Any, ClassVar, Final, Literal
from pygments.filter import Filter
from pygments.lexer import Lexer
from pygments.token import _TokenType
def find_filter_class(filtername): ...
def get_filter_by_name(filtername, **options): ...
def find_filter_class(filtername: str) -> type[Filter] | None: ...
# Keyword arguments are forwarded to the filter class.
def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...
def get_all_filters() -> Generator[str]: ...
class CodeTagFilter(Filter):
tag_re: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
tag_re: Pattern[str]
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(
self, *, codetags: str | list[str] | tuple[str, ...] = ["XXX", "TODO", "FIXME", "BUG", "NOTE"], **options: Any
) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class SymbolFilter(Filter):
latex_symbols: Incomplete
isabelle_symbols: Incomplete
lang_map: Incomplete
symbols: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
latex_symbols: ClassVar[dict[str, str]]
isabelle_symbols: ClassVar[dict[str, str]]
lang_map: ClassVar[dict[Literal["isabelle", "latex"], dict[str, str]]]
symbols: dict[str, str] # One of latex_symbols or isabelle_symbols.
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(self, *, lang: Literal["isabelle", "latex"] = "isabelle", **options: Any) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class KeywordCaseFilter(Filter):
convert: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
convert: Callable[[str], str]
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(self, *, case: Literal["lower", "upper", "capitalize"] = "lower", **options: Any) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class NameHighlightFilter(Filter):
names: Incomplete
tokentype: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
names: set[str]
tokentype: _TokenType
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(
self, *, names: str | list[str] | tuple[str, ...] = [], tokentype: str | _TokenType | None = None, **options: Any
) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class ErrorToken(Exception): ...
class RaiseOnErrorTokenFilter(Filter):
exception: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
exception: type[Exception]
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(self, *, excclass: type[Exception] = ..., **options: Any) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class VisibleWhitespaceFilter(Filter):
wstt: Incomplete
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
spaces: str
tabs: str
newlines: str
wstt: bool
def __init__(
self,
*,
spaces: str | bool = False,
tabs: str | bool = False,
newlines: str | bool = False,
tabsize: ConvertibleToInt = 8,
wstokentype: bool | int | str = True, # Any value accepted by get_bool_opt.
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
**options: Any,
) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class GobbleFilter(Filter):
n: Incomplete
def __init__(self, **options) -> None: ...
def gobble(self, value, left): ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
n: int
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(self, *, n: ConvertibleToInt = 0, **options: Any) -> None: ...
def gobble(self, value: str, left: int) -> tuple[str, int]: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class TokenMergeFilter(Filter):
def __init__(self, **options) -> None: ...
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
# Arbitrary additional keyword arguments are permitted and are stored in self.options.
def __init__(self, **options: Any) -> None: ...
def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
FILTERS: Incomplete
FILTERS: Final[dict[str, type[Filter]]]