Mirror of https://github.com/davidhalter/typeshed.git, synced 2026-01-23 19:41:51 +08:00
Bump pygments to 2.17 (#11051)

* Add some previously missing lexers.
* Improve lexer type annotations.
stubs/Pygments/METADATA.toml
@@ -1,4 +1,4 @@
-version = "2.16.*"
+version = "2.17.*"
 upstream_repository = "https://github.com/pygments/pygments"
 requires = ["types-docutils", "types-setuptools"]
 partial_stub = true
stubs/Pygments/pygments/lexer.pyi
@@ -1,47 +1,49 @@
+from _typeshed import Incomplete
 from collections.abc import Iterable, Iterator, Sequence
-from typing import Any
+from re import RegexFlag
+from typing import ClassVar
 
 from pygments.token import _TokenType
 from pygments.util import Future
 
 class LexerMeta(type):
     def __new__(cls, name, bases, d): ...
-    def analyse_text(self, text) -> None: ...  # actually defined in class Lexer
+    def analyse_text(self, text: str) -> float: ...  # actually defined in class Lexer
     # ClassVars of Lexer, but same situation as with StyleMeta and Style
     name: str
     aliases: Sequence[str]  # not intended mutable
     filenames: Sequence[str]
     alias_filenames: Sequence[str]
     mimetypes: Sequence[str]
-    priority: int
+    priority: float
     url: str | None
 
 class Lexer(metaclass=LexerMeta):
-    options: Any
-    stripnl: Any
-    stripall: Any
-    ensurenl: Any
-    tabsize: Any
-    encoding: Any
-    filters: Any
+    options: Incomplete
+    stripnl: Incomplete
+    stripall: Incomplete
+    ensurenl: Incomplete
+    tabsize: Incomplete
+    encoding: Incomplete
+    filters: Incomplete
     def __init__(self, **options) -> None: ...
     def add_filter(self, filter_, **options) -> None: ...
     def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class DelegatingLexer(Lexer):
-    root_lexer: Any
-    language_lexer: Any
-    needle: Any
+    root_lexer: Incomplete
+    language_lexer: Incomplete
+    needle: Incomplete
     def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class include(str): ...
 class _inherit: ...
 
-inherit: Any
+inherit: Incomplete
 
-class combined(tuple[Any, ...]):
+class combined(tuple[Incomplete, ...]):
     def __new__(cls, *args): ...
     def __init__(self, *args) -> None: ...
 
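
Note on the `analyse_text` change above: Pygments uses the method's return value as a confidence score between 0.0 and 1.0 when ranking candidate lexers in `guess_lexer()`, so `-> float` matches the runtime contract that the old `-> None` annotation missed. A minimal usage sketch (not part of this commit):

from pygments.lexers import guess_lexer

# guess_lexer() calls each lexer's analyse_text() and picks the best score;
# the shebang makes PythonLexer score highly here.
lexer = guess_lexer('#!/usr/bin/env python\nprint("hi")\n')
print(lexer.name)  # "Python"
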
@@ -57,18 +59,18 @@ def bygroups(*args): ...
 
 class _This: ...
 
-this: Any
+this: Incomplete
 
 def using(_other, **kwargs): ...
 
 class default:
-    state: Any
+    state: Incomplete
     def __init__(self, state) -> None: ...
 
 class words(Future):
-    words: Any
-    prefix: Any
-    suffix: Any
+    words: Incomplete
+    prefix: Incomplete
+    suffix: Incomplete
     def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
     def get(self): ...
 
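
For readers unfamiliar with these helpers: `words()` compiles a word list into a single optimized regex (with the `prefix`/`suffix` parameters typed above), and `default(state)` is a rule that transitions state without consuming input. A hedged sketch of how they appear in a token table; `TinyLexer` is hypothetical, not part of this commit:

from pygments.lexer import RegexLexer, default, words
from pygments.token import Keyword, Name, String, Text

class TinyLexer(RegexLexer):  # hypothetical, for illustration only
    name = "Tiny"
    tokens = {
        "root": [
            # words() builds one regex from the list, honoring suffix=
            (words(("if", "else", "while"), suffix=r"\b"), Keyword),
            (r'"', String, "string"),
            (r"[a-zA-Z_]\w*", Name),
            (r"\s+", Text),
        ],
        "string": [
            (r'[^"\n]+', String),
            (r'"', String, "#pop"),
            default("#pop"),  # unterminated string at end of line: leave state
        ],
    }
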
@@ -78,15 +80,15 @@ class RegexLexerMeta(LexerMeta):
     def __call__(cls, *args, **kwds): ...
 
 class RegexLexer(Lexer, metaclass=RegexLexerMeta):
-    flags: Any
-    tokens: Any
+    flags: ClassVar[RegexFlag]
+    tokens: ClassVar[dict[str, list[Incomplete]]]
     def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class LexerContext:
-    text: Any
-    pos: Any
-    end: Any
-    stack: Any
+    text: Incomplete
+    pos: Incomplete
+    end: Incomplete
+    stack: Incomplete
     def __init__(self, text, pos, stack: Incomplete | None = None, end: Incomplete | None = None) -> None: ...
 
 class ExtendedRegexLexer(RegexLexer):
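
The tightened `RegexLexer` annotations above describe what a typical subclass already provides: `flags` is an `re.RegexFlag` and `tokens` maps state names to rule lists. An illustrative sketch (the class is hypothetical):

import re
from pygments.lexer import RegexLexer
from pygments.token import Comment, Text

class CommentOnlyLexer(RegexLexer):  # hypothetical, for illustration only
    name = "CommentOnly"
    flags = re.MULTILINE            # satisfies ClassVar[RegexFlag]
    tokens = {                      # satisfies ClassVar[dict[str, list[...]]]
        "root": [
            (r"#[^\n]*", Comment.Single),
            (r"[^#]+", Text),
        ]
    }
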

stubs/Pygments/pygments/lexers/javascript.pyi (new file, 40 lines)
@@ -0,0 +1,40 @@
+from collections.abc import Iterator
+from typing_extensions import Final
+
+from ..lexer import Lexer, RegexLexer
+from ..token import _TokenType
+
+__all__ = [
+    "JavascriptLexer",
+    "KalLexer",
+    "LiveScriptLexer",
+    "DartLexer",
+    "TypeScriptLexer",
+    "LassoLexer",
+    "ObjectiveJLexer",
+    "CoffeeScriptLexer",
+    "MaskLexer",
+    "EarlGreyLexer",
+    "JuttleLexer",
+    "NodeConsoleLexer",
+]
+
+JS_IDENT_START: Final[str]
+JS_IDENT_PART: Final[str]
+JS_IDENT: Final[str]
+
+class JavascriptLexer(RegexLexer): ...
+class TypeScriptLexer(JavascriptLexer): ...
+class KalLexer(RegexLexer): ...
+class LiveScriptLexer(RegexLexer): ...
+class DartLexer(RegexLexer): ...
+
+class LassoLexer(RegexLexer):
+    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
+
+class ObjectiveJLexer(RegexLexer): ...
+class CoffeeScriptLexer(RegexLexer): ...
+class MaskLexer(RegexLexer): ...
+class EarlGreyLexer(RegexLexer): ...
+class JuttleLexer(RegexLexer): ...
+class NodeConsoleLexer(Lexer): ...
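
A short usage sketch for the newly stubbed module (not part of this commit); `get_tokens()` comes from the `Lexer` stub updated above and yields `(token_type, value)` pairs:

from pygments.lexers.javascript import JavascriptLexer, TypeScriptLexer

js = JavascriptLexer()
for token_type, value in js.get_tokens("let x = 1;"):
    print(token_type, repr(value))

# TypeScriptLexer subclasses JavascriptLexer, as the stub records:
assert issubclass(TypeScriptLexer, JavascriptLexer)
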

stubs/Pygments/pygments/lexers/jsx.pyi (new file, 5 lines)
@@ -0,0 +1,5 @@
+from .javascript import JavascriptLexer
+
+__all__ = ["JsxLexer"]
+
+class JsxLexer(JavascriptLexer): ...

stubs/Pygments/pygments/lexers/kusto.pyi (new file, 10 lines)
@@ -0,0 +1,10 @@
+from typing_extensions import Final
+
+from ..lexer import RegexLexer
+
+__all__ = ["KustoLexer"]
+
+KUSTO_KEYWORDS: Final[list[str]]
+KUSTO_PUNCTUATION: Final[list[str]]
+
+class KustoLexer(RegexLexer): ...

stubs/Pygments/pygments/lexers/ldap.pyi (new file, 6 lines)
@@ -0,0 +1,6 @@
+from ..lexer import RegexLexer
+
+__all__ = ["LdifLexer", "LdaprcLexer"]
+
+class LdifLexer(RegexLexer): ...
+class LdaprcLexer(RegexLexer): ...

stubs/Pygments/pygments/lexers/lean.pyi (new file, 7 lines)
@@ -0,0 +1,7 @@
+from ..lexer import RegexLexer
+
+__all__ = ["Lean3Lexer"]
+
+class Lean3Lexer(RegexLexer): ...
+
+LeanLexer = Lean3Lexer
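
`LeanLexer = Lean3Lexer` keeps the old name importable even though only `Lean3Lexer` appears in `__all__` (which affects only star-imports), so both of these type-check:

from pygments.lexers.lean import Lean3Lexer, LeanLexer

assert LeanLexer is Lean3Lexer  # backwards-compatible alias
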

stubs/Pygments/pygments/lexers/lisp.pyi (new file, 96 lines)
@@ -0,0 +1,96 @@
+from _typeshed import Incomplete
+from collections.abc import Iterator
+from typing import ClassVar
+
+from ..lexer import RegexLexer
+from ..token import _TokenType
+
+__all__ = [
+    "SchemeLexer",
+    "CommonLispLexer",
+    "HyLexer",
+    "RacketLexer",
+    "NewLispLexer",
+    "EmacsLispLexer",
+    "ShenLexer",
+    "CPSALexer",
+    "XtlangLexer",
+    "FennelLexer",
+]
+
+class SchemeLexer(RegexLexer):
+    valid_name: ClassVar[str]
+    token_end: ClassVar[str]
+    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
+    number_rules: ClassVar[dict[Incomplete, Incomplete]]
+    def decimal_cb(self, match) -> Iterator[tuple[Incomplete, Incomplete, Incomplete]]: ...
+
+class CommonLispLexer(RegexLexer):
+    nonmacro: ClassVar[str]
+    constituent: ClassVar[str]
+    terminated: ClassVar[str]
+    symbol: ClassVar[str]
+    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
+
+class HyLexer(RegexLexer):
+    special_forms: ClassVar[tuple[str, ...]]
+    declarations: ClassVar[tuple[str, ...]]
+    hy_builtins: ClassVar[tuple[str, ...]]
+    hy_core: ClassVar[tuple[str, ...]]
+    builtins: ClassVar[tuple[str, ...]]
+    valid_name: ClassVar[str]
+
+class RacketLexer(RegexLexer): ...
+
+class NewLispLexer(RegexLexer):
+    builtins: ClassVar[tuple[str, ...]]
+    valid_name: ClassVar[str]
+
+class EmacsLispLexer(RegexLexer):
+    nonmacro: ClassVar[str]
+    constituent: ClassVar[str]
+    terminated: ClassVar[str]
+    symbol: ClassVar[str]
+    macros: ClassVar[set[str]]
+    special_forms: ClassVar[set[str]]
+    builtin_function: ClassVar[set[str]]
+    builtin_function_highlighted: ClassVar[set[str]]
+    lambda_list_keywords: ClassVar[set[str]]
+    error_keywords: ClassVar[set[str]]
+    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
+
+class ShenLexer(RegexLexer):
+    DECLARATIONS: ClassVar[tuple[str, ...]]
+    SPECIAL_FORMS: ClassVar[tuple[str, ...]]
+    BUILTINS: ClassVar[tuple[str, ...]]
+    BUILTINS_ANYWHERE: ClassVar[tuple[str, ...]]
+    MAPPINGS: ClassVar[dict[str, Incomplete]]
+
+    valid_symbol_chars: ClassVar[str]
+    valid_name: ClassVar[str]
+    symbol_name: ClassVar[str]
+    variable: ClassVar[str]
+
+    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
+
+class CPSALexer(RegexLexer):
+    valid_name: ClassVar[str]
+
+class XtlangLexer(RegexLexer):
+    common_keywords: ClassVar[tuple[str, ...]]
+    scheme_keywords: ClassVar[tuple[str, ...]]
+    xtlang_bind_keywords: ClassVar[tuple[str, ...]]
+    xtlang_keywords: ClassVar[tuple[str, ...]]
+    common_functions: ClassVar[tuple[str, ...]]
+    scheme_functions: ClassVar[tuple[str, ...]]
+    xtlang_functions: ClassVar[tuple[str, ...]]
+
+    valid_scheme_name: ClassVar[str]
+    valid_xtlang_name: ClassVar[str]
+    valid_xtlang_type: ClassVar[str]
+
+class FennelLexer(RegexLexer):
+    special_forms: ClassVar[tuple[str, ...]]
+    declarations: ClassVar[tuple[str, ...]]
+    builtins: ClassVar[tuple[str, ...]]
+    valid_name: ClassVar[str]
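
Why `# type: ignore[override]` recurs in this file: `RegexLexer.get_tokens_unprocessed()` (stubbed earlier in this commit) takes an optional `stack` parameter, while these overrides accept only `text`. Removing a parameter narrows the method signature, which mypy reports as an [override] incompatibility, so the stubs suppress it deliberately:

# Base class, from pygments/lexer.pyi above:
#     def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
# Override here (no `stack`), hence the suppression:
#     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]
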

stubs/Pygments/pygments/lexers/prql.pyi (new file, 11 lines)
@@ -0,0 +1,11 @@
+from _typeshed import Incomplete
+from typing import ClassVar
+
+from ..lexer import RegexLexer
+
+__all__ = ["PrqlLexer"]
+
+class PrqlLexer(RegexLexer):
+    builtinTypes: ClassVar[Incomplete]
+    def innerstring_rules(ttype) -> list[tuple[str, Incomplete]]: ...
+    def fstring_rules(ttype) -> list[tuple[str, Incomplete]]: ...

stubs/Pygments/pygments/lexers/vip.pyi (new file, 19 lines)
@@ -0,0 +1,19 @@
+from typing import ClassVar
+
+from ..lexer import RegexLexer
+
+__all__ = ["VisualPrologLexer", "VisualPrologGrammarLexer"]
+
+class VisualPrologBaseLexer(RegexLexer):
+    minorendkw: ClassVar[tuple[str, ...]]
+    minorkwexp: ClassVar[tuple[str, ...]]
+    dockw: ClassVar[tuple[str, ...]]
+
+class VisualPrologLexer(VisualPrologBaseLexer):
+    majorkw: ClassVar[tuple[str, ...]]
+    minorkw: ClassVar[tuple[str, ...]]
+    directivekw: ClassVar[tuple[str, ...]]
+
+class VisualPrologGrammarLexer(VisualPrologBaseLexer):
+    majorkw: ClassVar[tuple[str, ...]]
+    directivekw: ClassVar[tuple[str, ...]]

stubs/Pygments/pygments/lexers/vyper.pyi (new file, 5 lines)
@@ -0,0 +1,5 @@
+from ..lexer import RegexLexer
+
+__all__ = ["VyperLexer"]
+
+class VyperLexer(RegexLexer): ...
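
Once these stubs are in place, the newly added lexers resolve through the normal registry lookup. A hedged sketch; the alias strings below are the upstream Pygments aliases and are assumptions here, since the stubs themselves do not define them:

from pygments.lexers import get_lexer_by_name

# Assumed aliases: "kusto", "ldif", "lean", "prql", "vyper"
for alias in ("kusto", "ldif", "lean", "prql", "vyper"):
    lexer = get_lexer_by_name(alias)
    print(alias, "->", type(lexer).__name__)
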

stubs/Pygments/pygments/style.pyi
@@ -36,5 +36,8 @@ class StyleMeta(type):
     line_number_special_color: str
     line_number_special_background_color: str
     styles: Mapping[_TokenType, str]  # not intended to be mutable
+    name: str
+    aliases: list[str]
+    web_style_gallery_exclude: bool
 
 class Style(metaclass=StyleMeta): ...
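
A hedged sketch of a `Style` subclass exercising the three newly annotated class attributes; the concrete values are illustrative only:

from pygments.style import Style
from pygments.token import Comment, Keyword

class MyStyle(Style):
    name = "my-style"                 # str, newly annotated above
    aliases = ["mystyle"]             # list[str], newly annotated above
    web_style_gallery_exclude = True  # bool, newly annotated above
    styles = {
        Keyword: "bold #005fd7",
        Comment: "italic #8a8a8a",
    }
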