Add stubs for antlr4 (#11192)

This commit is contained in:
Kevin Nowaczyk
2024-01-15 09:35:18 -05:00
committed by GitHub
parent 5367bab4ed
commit 64f7da9e45
60 changed files with 1981 additions and 0 deletions

View File

@@ -0,0 +1,6 @@
version = "4.13.*"
upstream_repository = "https://github.com/antlr/antlr4"
[tool.stubtest]
ignore_missing_stub = true
platforms = ["linux", "win32"]

View File

@@ -0,0 +1,39 @@
from _typeshed import Incomplete
from antlr4.error.Errors import IllegalStateException as IllegalStateException
from antlr4.Token import Token as Token
# Placeholder for antlr4.Lexer — importing it here would be circular, so the
# stub exposes it as Incomplete (runtime assigns the real class lazily).
Lexer: Incomplete

class TokenStream: ...  # empty base class marking token-stream implementations

class BufferedTokenStream(TokenStream):
    """Token stream that buffers tokens pulled from a lexer."""
    tokenSource: Incomplete  # the Lexer tokens are fetched from
    tokens: Incomplete       # buffered Token objects
    index: int               # current read position into the buffer
    fetchedEOF: bool         # whether EOF has already been fetched
    def __init__(self, tokenSource: Lexer) -> None: ...
    def mark(self) -> int: ...
    def release(self, marker: int) -> None: ...
    def reset(self) -> None: ...
    def seek(self, index: int) -> None: ...
    def get(self, index: int) -> Token: ...
    def consume(self) -> None: ...
    def sync(self, i: int) -> bool: ...
    def fetch(self, n: int) -> int: ...
    def getTokens(self, start: int, stop: int, types: set[int] | None = None) -> list[Token]: ...
    def LA(self, i: int) -> int: ...
    # LB/LT look behind/ahead by k tokens; None when out of range.
    def LB(self, k: int) -> Token | None: ...
    def LT(self, k: int) -> Token | None: ...
    def adjustSeekIndex(self, i: int): ...
    def lazyInit(self) -> None: ...
    def setup(self) -> None: ...
    def setTokenSource(self, tokenSource: Lexer): ...
    def nextTokenOnChannel(self, i: int, channel: int): ...
    def previousTokenOnChannel(self, i: int, channel: int): ...
    def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ...
    def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ...
    def filterForChannel(self, left: int, right: int, channel: int): ...
    def getSourceName(self): ...
    def getText(self, start: int | None = None, stop: int | None = None): ...
    def fill(self) -> None: ...

View File

@@ -0,0 +1,14 @@
from _typeshed import Incomplete
from antlr4.Token import CommonToken as CommonToken
class TokenFactory: ...  # empty base class for token factories

class CommonTokenFactory(TokenFactory):
    """Factory creating CommonToken instances."""
    DEFAULT: Incomplete   # shared default factory instance
    copyText: Incomplete  # whether created tokens copy their text eagerly
    def __init__(self, copyText: bool = False) -> None: ...
    def create(
        self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int
    ): ...
    # Creates a token carrying only type and text (no source/position info).
    def createThin(self, type: int, text: str): ...

View File

@@ -0,0 +1,13 @@
from _typeshed import Incomplete
from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.Token import Token as Token
class CommonTokenStream(BufferedTokenStream):
    """Buffered token stream restricted to tokens on a single channel."""
    channel: Incomplete  # the channel this stream serves tokens from
    def __init__(self, lexer: Lexer, channel: int = 0) -> None: ...
    def adjustSeekIndex(self, i: int) -> int: ...
    def LB(self, k: int) -> Token | None: ...
    def LT(self, k: int) -> Token | None: ...
    def getNumberOfOnChannelTokens(self) -> int: ...

View File

@@ -0,0 +1,8 @@
from _typeshed import Incomplete
from antlr4.InputStream import InputStream as InputStream
class FileStream(InputStream):
    """InputStream whose character data is read from a file."""
    fileName: Incomplete  # path the stream was constructed from
    def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ...
    def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ...

View File

@@ -0,0 +1,21 @@
from _typeshed import Incomplete
from antlr4.Token import Token as Token
class InputStream:
    """Character stream over an in-memory string."""
    name: str            # source name for error reporting
    strdata: Incomplete  # the original string data
    data: Incomplete     # processed character data
    def __init__(self, data: str) -> None: ...
    @property
    def index(self): ...
    @property
    def size(self): ...
    def reset(self) -> None: ...
    def consume(self) -> None: ...
    # LA/LT look ahead by `offset` characters from the current index.
    def LA(self, offset: int): ...
    def LT(self, offset: int): ...
    def mark(self): ...
    def release(self, marker: int): ...
    def seek(self, _index: int): ...
    def getText(self, start: int, stop: int): ...

View File

@@ -0,0 +1,19 @@
from antlr4.Token import Token as Token
class IntervalSet:
    """Ordered set of integer intervals (stored as range objects)."""
    intervals: list[range] | None  # None until the first interval is added
    readonly: bool
    def __init__(self) -> None: ...
    def __iter__(self): ...
    def __getitem__(self, item): ...
    def addOne(self, v: int): ...
    def addRange(self, v: range): ...
    def addSet(self, other: IntervalSet): ...
    def reduce(self, k: int): ...
    def complement(self, start: int, stop: int): ...
    def __contains__(self, item) -> bool: ...
    def __len__(self) -> int: ...
    def removeRange(self, v) -> None: ...
    def removeOne(self, v) -> None: ...
    # Render the set using grammar literal/symbolic token names.
    def toString(self, literalNames: list[str], symbolicNames: list[str]): ...
    def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int): ...

View File

@@ -0,0 +1,26 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import ATNConfig as ATNConfig
from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState
from antlr4.atn.Transition import (
AbstractPredicateTransition as AbstractPredicateTransition,
NotSetTransition as NotSetTransition,
RuleTransition as RuleTransition,
WildcardTransition as WildcardTransition,
)
from antlr4.IntervalSet import IntervalSet as IntervalSet
from antlr4.PredictionContext import (
PredictionContext as PredictionContext,
PredictionContextFromRuleContext as PredictionContextFromRuleContext,
SingletonPredictionContext as SingletonPredictionContext,
)
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
class LL1Analyzer:
    """Computes lookahead sets over an ATN."""
    HIT_PRED: Incomplete  # sentinel token type marking a predicate hit
    atn: Incomplete       # the ATN being analyzed
    def __init__(self, atn: ATN) -> None: ...
    def getDecisionLookahead(self, s: ATNState): ...
    # Compute the set of tokens that can follow state `s`.
    def LOOK(self, s: ATNState, stopState: ATNState | None = None, ctx: RuleContext | None = None): ...

View File

@@ -0,0 +1,64 @@
from _typeshed import Incomplete
from typing import TextIO
from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator
from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory
from antlr4.error.Errors import (
IllegalStateException as IllegalStateException,
LexerNoViableAltException as LexerNoViableAltException,
RecognitionException as RecognitionException,
)
from antlr4.InputStream import InputStream as InputStream
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.Token import Token as Token
class TokenSource: ...  # empty base class for objects that produce tokens

class Lexer(Recognizer, TokenSource):
    """Recognizer that tokenizes an InputStream."""
    DEFAULT_MODE: int
    MORE: int  # sentinel token type: keep matching, emit nothing yet
    SKIP: int  # sentinel token type: discard the matched text
    DEFAULT_TOKEN_CHANNEL: Incomplete
    HIDDEN: Incomplete
    MIN_CHAR_VALUE: int
    MAX_CHAR_VALUE: int
    def __init__(self, input: InputStream, output: TextIO = ...) -> None: ...
    def reset(self) -> None: ...
    def nextToken(self): ...
    def skip(self) -> None: ...
    def more(self) -> None: ...
    # Lexer mode management (mode stack for pushMode/popMode).
    def mode(self, m: int): ...
    def pushMode(self, m: int): ...
    def popMode(self): ...
    @property
    def inputStream(self): ...
    @inputStream.setter
    def inputStream(self, input: InputStream): ...
    @property
    def sourceName(self): ...
    def emitToken(self, token: Token): ...
    def emit(self): ...
    def emitEOF(self): ...
    # Mutable state of the token currently being matched.
    @property
    def type(self): ...
    @type.setter
    def type(self, type: int): ...
    @property
    def line(self): ...
    @line.setter
    def line(self, line: int): ...
    @property
    def column(self): ...
    @column.setter
    def column(self, column: int): ...
    def getCharIndex(self): ...
    @property
    def text(self): ...
    @text.setter
    def text(self, txt: str): ...
    def getAllTokens(self): ...
    def notifyListeners(self, e: LexerNoViableAltException): ...
    # Helpers producing printable representations of error characters.
    def getErrorDisplay(self, s: str): ...
    def getErrorDisplayForChar(self, c: str): ...
    def getCharErrorDisplay(self, c: str): ...
    def recover(self, re: RecognitionException): ...

View File

@@ -0,0 +1,19 @@
from _typeshed import Incomplete
from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory
from antlr4.Lexer import TokenSource as TokenSource
from antlr4.Token import Token as Token
class ListTokenSource(TokenSource):
    """TokenSource backed by a pre-built list of tokens."""
    tokens: Incomplete      # the underlying token list
    sourceName: Incomplete  # optional name for error reporting
    pos: int                # index of the next token to emit
    eofToken: Incomplete    # cached EOF token once the list is exhausted
    def __init__(self, tokens: list[Token], sourceName: str | None = None) -> None: ...
    @property
    def column(self): ...
    def nextToken(self): ...
    @property
    def line(self): ...
    def getInputStream(self): ...
    def getSourceName(self): ...

View File

@@ -0,0 +1,74 @@
from _typeshed import Incomplete
from typing import TextIO
from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions
from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.CommonTokenFactory import TokenFactory as TokenFactory
from antlr4.error.Errors import (
RecognitionException as RecognitionException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.error.ErrorStrategy import DefaultErrorStrategy as DefaultErrorStrategy
from antlr4.InputStream import InputStream as InputStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher
from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode
class TraceListener(ParseTreeListener):
    """Listener that traces rule entry/exit events for a parser."""
    def __init__(self, parser) -> None: ...
    def enterEveryRule(self, ctx) -> None: ...
    def visitTerminal(self, node) -> None: ...
    def visitErrorNode(self, node) -> None: ...
    def exitEveryRule(self, ctx) -> None: ...

class Parser(Recognizer):
    """Base class for generated parsers."""
    bypassAltsAtnCache: Incomplete  # cache of ATNs with bypass alternatives
    buildParseTrees: bool           # whether parse trees are built during parsing
    def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ...
    def reset(self) -> None: ...
    def match(self, ttype: int): ...
    def matchWildcard(self): ...
    # Parse-listener management.
    def getParseListeners(self): ...
    def addParseListener(self, listener: ParseTreeListener): ...
    def removeParseListener(self, listener: ParseTreeListener): ...
    def removeParseListeners(self) -> None: ...
    def triggerEnterRuleEvent(self) -> None: ...
    def triggerExitRuleEvent(self) -> None: ...
    def getNumberOfSyntaxErrors(self): ...
    def getTokenFactory(self): ...
    def setTokenFactory(self, factory: TokenFactory): ...
    def getATNWithBypassAlts(self): ...
    def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ...
    def getInputStream(self): ...
    def setInputStream(self, input: InputStream): ...
    def getTokenStream(self): ...
    def setTokenStream(self, input: TokenStream): ...
    def getCurrentToken(self): ...
    def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ...
    def consume(self): ...
    def addContextToParseTree(self) -> None: ...
    state: Incomplete  # current ATN state number (set during rule entry)
    # Rule-context stack management used by generated parser code.
    def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
    def exitRule(self) -> None: ...
    def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ...
    def getPrecedence(self): ...
    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
    def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
    def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ...
    def getInvokingContext(self, ruleIndex: int): ...
    def precpred(self, localctx: RuleContext, precedence: int): ...
    def inContext(self, context: str): ...
    def isExpectedToken(self, symbol: int): ...
    def getExpectedTokens(self): ...
    def getExpectedTokensWithinCurrentRule(self): ...
    def getRuleIndex(self, ruleName: str): ...
    def getRuleInvocationStack(self, p: RuleContext | None = None): ...
    def getDFAStrings(self): ...
    def dumpDFA(self) -> None: ...
    def getSourceName(self): ...
    def setTrace(self, trace: bool): ...

View File

@@ -0,0 +1,36 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNState import ATNState as ATNState, LoopEndState as LoopEndState, StarLoopEntryState as StarLoopEntryState
from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator
from antlr4.atn.Transition import Transition as Transition
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.dfa.DFA import DFA as DFA
from antlr4.error.Errors import (
FailedPredicateException as FailedPredicateException,
RecognitionException as RecognitionException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.Lexer import Lexer as Lexer
from antlr4.Parser import Parser as Parser
from antlr4.ParserRuleContext import InterpreterRuleContext as InterpreterRuleContext, ParserRuleContext as ParserRuleContext
from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache
from antlr4.Token import Token as Token
class ParserInterpreter(Parser):
    """Parser that interprets an ATN directly instead of using generated code."""
    grammarFileName: Incomplete
    atn: Incomplete
    tokenNames: Incomplete
    ruleNames: Incomplete
    decisionToDFA: Incomplete       # per-decision DFA cache
    sharedContextCache: Incomplete  # shared PredictionContextCache
    pushRecursionContextStates: Incomplete
    def __init__(
        self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream
    ) -> None: ...
    state: Incomplete  # current ATN state number during interpretation
    # Interpret the grammar starting at the given rule index.
    def parse(self, startRuleIndex: int): ...
    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
    def getATNState(self): ...
    def visitState(self, p: ATNState): ...
    def visitRuleStopState(self, p: ATNState): ...

View File

@@ -0,0 +1,41 @@
from _typeshed import Incomplete
from collections.abc import Generator
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.tree.Tree import (
INVALID_INTERVAL as INVALID_INTERVAL,
ErrorNodeImpl as ErrorNodeImpl,
ParseTree as ParseTree,
ParseTreeListener as ParseTreeListener,
TerminalNode as TerminalNode,
TerminalNodeImpl as TerminalNodeImpl,
)
class ParserRuleContext(RuleContext):
    """Rule context populated while parsing: children, start/stop tokens."""
    children: Incomplete  # child parse-tree nodes
    start: Incomplete     # first token matched by this rule
    stop: Incomplete      # last token matched by this rule
    exception: Incomplete # recognition exception if the rule failed
    def __init__(self, parent: ParserRuleContext | None = None, invokingStateNumber: int | None = None) -> None: ...
    parentCtx: Incomplete
    invokingState: Incomplete
    def copyFrom(self, ctx: ParserRuleContext): ...
    def enterRule(self, listener: ParseTreeListener): ...
    def exitRule(self, listener: ParseTreeListener): ...
    def addChild(self, child: ParseTree): ...
    def removeLastChild(self) -> None: ...
    def addTokenNode(self, token: Token): ...
    def addErrorNode(self, badToken: Token): ...
    # Child accessors; `ttype`/`ctxType` filter children by node class.
    def getChild(self, i: int, ttype: type | None = None): ...
    def getChildren(self, predicate: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...
    def getToken(self, ttype: int, i: int): ...
    def getTokens(self, ttype: int): ...
    def getTypedRuleContext(self, ctxType: type, i: int): ...
    def getTypedRuleContexts(self, ctxType: type): ...
    def getChildCount(self): ...
    def getSourceInterval(self): ...

class InterpreterRuleContext(ParserRuleContext):
    """ParserRuleContext that additionally records its rule index."""
    ruleIndex: Incomplete
    def __init__(self, parent: ParserRuleContext, invokingStateNumber: int, ruleIndex: int) -> None: ...

View File

@@ -0,0 +1,87 @@
from _typeshed import Incomplete, SupportsLenAndGetItem
from antlr4.atn.ATN import ATN as ATN
from antlr4.error.Errors import IllegalStateException as IllegalStateException
from antlr4.RuleContext import RuleContext as RuleContext
class PredictionContext:
    """Base class for prediction contexts (rule-invocation stacks in the ATN)."""
    EMPTY: Incomplete      # shared empty-context singleton
    EMPTY_RETURN_STATE: int
    globalNodeCount: int
    id = globalNodeCount
    cachedHashCode: Incomplete  # precomputed hash, supplied at construction
    def __init__(self, cachedHashCode: int) -> None: ...
    def __len__(self) -> int: ...
    def isEmpty(self): ...
    def hasEmptyPath(self): ...
    def getReturnState(self, index: int): ...
    def __hash__(self): ...
    # NOTE(review): declared without self in the stub, mirroring the runtime.
    def calculateHashCode(parent: PredictionContext, returnState: int): ...
    def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]): ...

class PredictionContextCache:
    """Cache that deduplicates equal PredictionContext instances."""
    cache: Incomplete
    def __init__(self) -> None: ...
    def add(self, ctx: PredictionContext): ...
    def get(self, ctx: PredictionContext): ...
    def __len__(self) -> int: ...

class SingletonPredictionContext(PredictionContext):
    """Context with exactly one parent/return-state pair."""
    @staticmethod
    def create(parent: PredictionContext, returnState: int): ...
    parentCtx: Incomplete
    returnState: Incomplete
    def __init__(self, parent: PredictionContext, returnState: int) -> None: ...
    def __len__(self) -> int: ...
    def getParent(self, index: int): ...
    def getReturnState(self, index: int): ...
    def __eq__(self, other): ...
    def __hash__(self): ...

class EmptyPredictionContext(SingletonPredictionContext):
    """The empty context (no invocation stack)."""
    def __init__(self) -> None: ...
    def isEmpty(self): ...
    def __eq__(self, other): ...
    def __hash__(self): ...

class ArrayPredictionContext(PredictionContext):
    """Context with multiple parent/return-state pairs."""
    parents: Incomplete
    returnStates: Incomplete
    def __init__(self, parents: list[PredictionContext], returnStates: list[int]) -> None: ...
    def isEmpty(self): ...
    def __len__(self) -> int: ...
    def getParent(self, index: int): ...
    def getReturnState(self, index: int): ...
    def __eq__(self, other): ...
    def __hash__(self): ...

# Build a PredictionContext from a parser RuleContext chain.
def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext | None = None): ...

# Context-merging helpers; `mergeCache` memoizes previously merged pairs.
def merge(
    a: PredictionContext,
    b: PredictionContext,
    rootIsWildcard: bool,
    mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None,
): ...
def mergeSingletons(
    a: SingletonPredictionContext,
    b: SingletonPredictionContext,
    rootIsWildcard: bool,
    mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None,
): ...
def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ...
def mergeArrays(
    a: ArrayPredictionContext,
    b: ArrayPredictionContext,
    rootIsWildcard: bool,
    mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None,
): ...
def combineCommonParents(parents: SupportsLenAndGetItem[PredictionContext]): ...
def getCachedPredictionContext(
    context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext]
): ...
def getAllContextNodes(
    context: PredictionContext,
    nodes: list[Incomplete] | None = None,
    visited: dict[PredictionContext, PredictionContext] | None = None,
): ...

View File

@@ -0,0 +1,27 @@
from _typeshed import Incomplete
from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListener, ProxyErrorListener as ProxyErrorListener
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
class Recognizer:
    """Common base class of Lexer and Parser."""
    tokenTypeMapCache: Incomplete  # cached token-name -> type maps
    ruleIndexMapCache: Incomplete  # cached rule-name -> index maps
    def __init__(self) -> None: ...
    def extractVersion(self, version): ...
    def checkVersion(self, toolVersion) -> None: ...
    # Error-listener management.
    def addErrorListener(self, listener) -> None: ...
    def removeErrorListener(self, listener) -> None: ...
    def removeErrorListeners(self) -> None: ...
    def getTokenTypeMap(self): ...
    def getRuleIndexMap(self): ...
    def getTokenType(self, tokenName: str): ...
    def getErrorHeader(self, e): ...
    def getTokenErrorDisplay(self, t: Token): ...
    def getErrorListenerDispatch(self): ...
    # Hooks overridden by generated recognizers for predicates/actions.
    def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ...
    def precpred(self, localctx: RuleContext, precedence: int): ...
    @property
    def state(self): ...
    @state.setter
    def state(self, atnState: int): ...

View File

@@ -0,0 +1,28 @@
from _typeshed import Incomplete
from collections.abc import Generator
from antlr4.tree.Tree import INVALID_INTERVAL as INVALID_INTERVAL, ParseTreeVisitor as ParseTreeVisitor, RuleNode as RuleNode
from antlr4.tree.Trees import Trees as Trees
# Placeholder for antlr4.Parser — importing it here would be circular, so the
# stub exposes it as Incomplete.
Parser: Incomplete

class RuleContext(RuleNode):
    """Minimal rule context: parent pointer plus invoking ATN state."""
    EMPTY: Incomplete  # shared empty context singleton
    parentCtx: Incomplete
    invokingState: Incomplete
    def __init__(self, parent: RuleContext | None = None, invokingState: int = -1) -> None: ...
    def depth(self): ...
    def isEmpty(self): ...
    def getSourceInterval(self): ...
    def getRuleContext(self): ...
    def getPayload(self): ...
    def getText(self): ...
    def getRuleIndex(self): ...
    def getAltNumber(self): ...
    def setAltNumber(self, altNumber: int): ...
    def getChild(self, i: int): ...
    def getChildCount(self): ...
    def getChildren(self) -> Generator[Incomplete, None, None]: ...
    def accept(self, visitor: ParseTreeVisitor): ...
    def toStringTree(self, ruleNames: list[Incomplete] | None = None, recog: Parser | None = None): ...
    def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ...

View File

@@ -0,0 +1,4 @@
from antlr4.InputStream import InputStream as InputStream
class StdinStream(InputStream):
    """InputStream whose character data is read from standard input."""
    def __init__(self, encoding: str = "ascii", errors: str = "strict") -> None: ...

View File

@@ -0,0 +1,48 @@
from _typeshed import Incomplete
class Token:
    """A lexed token: type, channel, source positions, and text."""
    INVALID_TYPE: int
    EPSILON: int
    MIN_USER_TOKEN_TYPE: int
    EOF: int
    DEFAULT_CHANNEL: int
    HIDDEN_CHANNEL: int
    source: Incomplete     # (TokenSource, InputStream) pair the token came from
    type: Incomplete       # token type
    channel: Incomplete    # channel the token lives on
    start: Incomplete      # start char index in the input
    stop: Incomplete       # stop char index in the input
    tokenIndex: Incomplete # index within the token stream
    line: Incomplete
    column: Incomplete
    def __init__(self) -> None: ...
    @property
    def text(self): ...
    @text.setter
    def text(self, text: str): ...
    def getTokenSource(self): ...
    def getInputStream(self): ...

class CommonToken(Token):
    """Standard concrete Token implementation."""
    EMPTY_SOURCE: Incomplete  # sentinel source for tokens with no stream
    source: Incomplete
    type: Incomplete
    channel: Incomplete
    start: Incomplete
    stop: Incomplete
    tokenIndex: int
    line: Incomplete
    column: Incomplete
    def __init__(
        self,
        source: tuple[Incomplete, Incomplete] = (None, None),
        type: int | None = None,
        channel: int = 0,
        start: int = -1,
        stop: int = -1,
    ) -> None: ...
    def clone(self): ...
    @property
    def text(self): ...
    @text.setter
    def text(self, text: str): ...

View File

@@ -0,0 +1,53 @@
from _typeshed import Incomplete
from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream
from antlr4.Token import Token as Token
class TokenStreamRewriter:
    """Records insert/replace/delete operations over a token stream and
    applies them when rendering text."""
    DEFAULT_PROGRAM_NAME: str
    PROGRAM_INIT_SIZE: int
    MIN_TOKEN_INDEX: int
    tokens: Incomplete                   # the underlying token stream
    programs: Incomplete                 # named lists of rewrite operations
    lastRewriteTokenIndexes: Incomplete  # per-program last rewritten index
    def __init__(self, tokens) -> None: ...
    def getTokenStream(self): ...
    def rollback(self, instruction_index, program_name) -> None: ...
    def deleteProgram(self, program_name="default") -> None: ...
    # Insert/replace/delete variants keyed either by token or by index.
    def insertAfterToken(self, token, text, program_name="default") -> None: ...
    def insertAfter(self, index, text, program_name="default") -> None: ...
    def insertBeforeIndex(self, index, text) -> None: ...
    def insertBeforeToken(self, token, text, program_name="default") -> None: ...
    def insertBefore(self, program_name, index, text) -> None: ...
    def replaceIndex(self, index, text) -> None: ...
    def replaceRange(self, from_idx, to_idx, text) -> None: ...
    def replaceSingleToken(self, token, text) -> None: ...
    def replaceRangeTokens(self, from_token, to_token, text, program_name="default") -> None: ...
    def replace(self, program_name, from_idx, to_idx, text) -> None: ...
    def deleteToken(self, token) -> None: ...
    def deleteIndex(self, index) -> None: ...
    def delete(self, program_name, from_idx, to_idx) -> None: ...
    def lastRewriteTokenIndex(self, program_name="default"): ...
    def setLastRewriteTokenIndex(self, program_name, i) -> None: ...
    def getProgram(self, program_name): ...
    def getDefaultText(self): ...
    def getText(self, program_name, start: int, stop: int): ...

class RewriteOperation:
    """A single recorded rewrite step."""
    tokens: Incomplete
    index: Incomplete            # token index the operation applies at
    text: Incomplete             # replacement/inserted text
    instructionIndex: int        # position within its rewrite program
    def __init__(self, tokens, index, text: str = "") -> None: ...
    def execute(self, buf): ...

class InsertBeforeOp(RewriteOperation):
    def __init__(self, tokens, index, text: str = "") -> None: ...
    def execute(self, buf): ...

class InsertAfterOp(InsertBeforeOp): ...

class ReplaceOp(RewriteOperation):
    last_index: Incomplete  # inclusive end of the replaced token range
    def __init__(self, from_idx, to_idx, tokens, text) -> None: ...
    def execute(self, buf): ...

View File

@@ -0,0 +1,2 @@
# Render an iterable as a string.
def str_list(val) -> str: ...
# Escape whitespace characters in `s`; spaces only when escapeSpaces is true.
def escapeWhitespace(s: str, escapeSpaces: bool): ...

View File

@@ -0,0 +1,32 @@
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode as PredictionMode
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream
from antlr4.dfa.DFA import DFA as DFA
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener as DiagnosticErrorListener
from antlr4.error.Errors import (
IllegalStateException as IllegalStateException,
NoViableAltException as NoViableAltException,
RecognitionException as RecognitionException,
)
from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy
from antlr4.FileStream import FileStream as FileStream
from antlr4.InputStream import InputStream as InputStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.Parser import Parser as Parser
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext, RuleContext as RuleContext
from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache
from antlr4.StdinStream import StdinStream as StdinStream
from antlr4.Token import Token as Token
from antlr4.tree.Tree import (
ErrorNode as ErrorNode,
ParseTreeListener as ParseTreeListener,
ParseTreeVisitor as ParseTreeVisitor,
ParseTreeWalker as ParseTreeWalker,
RuleNode as RuleNode,
TerminalNode as TerminalNode,
)
from antlr4.Utils import str_list as str_list

View File

@@ -0,0 +1,4 @@
from antlr4 import *
# Pretty-print a LISP-style parse-tree string.
def beautify_lisp_string(in_string): ...
# Command-line entry point of the antlr4 runtime package.
def main() -> None: ...

View File

@@ -0,0 +1,29 @@
from _typeshed import Incomplete
from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState
from antlr4.atn.ATNType import ATNType as ATNType
from antlr4.IntervalSet import IntervalSet as IntervalSet
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
class ATN:
    """Augmented transition network describing a grammar."""
    INVALID_ALT_NUMBER: int
    grammarType: Incomplete          # lexer vs parser (ATNType)
    maxTokenType: Incomplete
    states: Incomplete               # all ATN states
    decisionToState: Incomplete      # decision number -> DecisionState
    ruleToStartState: Incomplete
    ruleToStopState: Incomplete
    modeNameToStartState: Incomplete
    ruleToTokenType: Incomplete
    lexerActions: Incomplete
    modeToStartState: Incomplete
    def __init__(self, grammarType: ATNType, maxTokenType: int) -> None: ...
    # Token-lookahead computations for a state, with and without context.
    def nextTokensInContext(self, s: ATNState, ctx: RuleContext): ...
    def nextTokensNoContext(self, s: ATNState): ...
    def nextTokens(self, s: ATNState, ctx: RuleContext | None = None): ...
    def addState(self, state: ATNState): ...
    def removeState(self, state: ATNState): ...
    def defineDecisionState(self, s: DecisionState): ...
    def getDecisionState(self, decision: int): ...
    def getExpectedTokens(self, stateNumber: int, ctx: RuleContext): ...

View File

@@ -0,0 +1,44 @@
from _typeshed import Incomplete
from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState
from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor
from antlr4.atn.SemanticContext import SemanticContext as SemanticContext
from antlr4.PredictionContext import PredictionContext as PredictionContext
class ATNConfig:
    """An ATN state paired with an alternative, context and semantic predicate."""
    state: Incomplete
    alt: Incomplete                      # predicted alternative number
    context: Incomplete                  # PredictionContext (rule stack)
    semanticContext: Incomplete
    reachesIntoOuterContext: Incomplete
    precedenceFilterSuppressed: Incomplete
    # `config` acts as a copy source; other arguments override its fields.
    def __init__(
        self,
        state: ATNState | None = None,
        alt: int | None = None,
        context: PredictionContext | None = None,
        semantic: SemanticContext | None = None,
        config: ATNConfig | None = None,
    ) -> None: ...
    def __eq__(self, other): ...
    def __hash__(self): ...
    # Hash/equality variants used when stored inside an ATNConfigSet.
    def hashCodeForConfigSet(self): ...
    def equalsForConfigSet(self, other): ...

class LexerATNConfig(ATNConfig):
    """ATNConfig specialized for the lexer, carrying lexer actions."""
    lexerActionExecutor: Incomplete
    passedThroughNonGreedyDecision: Incomplete
    def __init__(
        self,
        state: ATNState,
        alt: int | None = None,
        context: PredictionContext | None = None,
        semantic: SemanticContext = ...,
        lexerActionExecutor: LexerActionExecutor | None = None,
        config: LexerATNConfig | None = None,
    ) -> None: ...
    def __hash__(self): ...
    def __eq__(self, other): ...
    def hashCodeForConfigSet(self): ...
    def equalsForConfigSet(self, other): ...
    def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState): ...

View File

@@ -0,0 +1,44 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import ATNConfig as ATNConfig
from antlr4.atn.SemanticContext import SemanticContext as SemanticContext
from antlr4.error.Errors import (
IllegalStateException as IllegalStateException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.PredictionContext import merge as merge
from antlr4.Utils import str_list as str_list
# Placeholder for antlr4.atn.ATNSimulator — importing it here would be
# circular, so the stub exposes it as Incomplete.
ATNSimulator: Incomplete

class ATNConfigSet:
    """Set of ATNConfig objects tracked during prediction."""
    configLookup: Incomplete   # dedup lookup structure
    fullCtx: Incomplete        # whether full-context prediction is in effect
    readonly: bool
    configs: Incomplete        # the configs in insertion order
    uniqueAlt: int
    conflictingAlts: Incomplete
    hasSemanticContext: bool
    dipsIntoOuterContext: bool
    cachedHashCode: int        # -1 style cache; recomputed when needed
    def __init__(self, fullCtx: bool = True) -> None: ...
    def __iter__(self): ...
    def add(self, config: ATNConfig, mergeCache: Incomplete | None = None): ...
    def getOrAdd(self, config: ATNConfig): ...
    def getStates(self): ...
    def getPredicates(self): ...
    def get(self, i: int): ...
    def optimizeConfigs(self, interpreter: ATNSimulator): ...
    def addAll(self, coll: list[Incomplete]): ...
    def __eq__(self, other): ...
    def __hash__(self): ...
    def hashConfigs(self): ...
    def __len__(self) -> int: ...
    def isEmpty(self): ...
    def __contains__(self, config) -> bool: ...
    def clear(self) -> None: ...
    def setReadonly(self, readonly: bool): ...

class OrderedATNConfigSet(ATNConfigSet):
    """ATNConfigSet that preserves configuration order."""
    def __init__(self) -> None: ...

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
class ATNDeserializationOptions:
    """Options controlling ATN deserialization."""
    defaultOptions: Incomplete  # shared default options instance
    readonly: bool              # enforced via __setattr__ below
    verifyATN: Incomplete
    generateRuleBypassTransitions: Incomplete
    def __init__(self, copyFrom: ATNDeserializationOptions | None = None) -> None: ...
    # Overridden to reject mutation once `readonly` is set.
    def __setattr__(self, key, value) -> None: ...

View File

@@ -0,0 +1,48 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions
from antlr4.atn.ATNState import *
from antlr4.atn.ATNType import ATNType as ATNType
from antlr4.atn.LexerAction import *
from antlr4.atn.Transition import *
from antlr4.Token import Token as Token
# Version of the serialized-ATN format this deserializer understands.
SERIALIZED_VERSION: int

class ATNDeserializer:
    """Rebuilds an ATN from its serialized integer form."""
    deserializationOptions: Incomplete
    def __init__(self, options: ATNDeserializationOptions | None = None) -> None: ...
    data: Incomplete  # serialized data being consumed
    pos: int          # read cursor into `data`
    def deserialize(self, data: list[int]): ...
    def checkVersion(self) -> None: ...
    def readATN(self): ...
    # Sequential section readers invoked during deserialize().
    def readStates(self, atn: ATN): ...
    def readRules(self, atn: ATN): ...
    def readModes(self, atn: ATN): ...
    def readSets(self, atn: ATN, sets: list[Incomplete]): ...
    def readEdges(self, atn: ATN, sets: list[Incomplete]): ...
    def readDecisions(self, atn: ATN): ...
    def readLexerActions(self, atn: ATN): ...
    def generateRuleBypassTransitions(self, atn: ATN): ...
    def generateRuleBypassTransition(self, atn: ATN, idx: int): ...
    def stateIsEndStateFor(self, state: ATNState, idx: int): ...
    def markPrecedenceDecisions(self, atn: ATN): ...
    def verifyATN(self, atn: ATN): ...
    def checkCondition(self, condition: bool, message: Incomplete | None = None): ...
    def readInt(self): ...
    edgeFactories: Incomplete  # per-transition-type constructor table
    def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]): ...
    stateFactories: Incomplete  # per-state-type constructor table
    def stateFactory(self, type: int, ruleIndex: int): ...
    # Lexer-action type codes used by lexerActionFactory.
    CHANNEL: int
    CUSTOM: int
    MODE: int
    MORE: int
    POP_MODE: int
    PUSH_MODE: int
    SKIP: int
    TYPE: int
    actionFactories: Incomplete  # per-action-type constructor table
    def lexerActionFactory(self, type: int, data1: int, data2: int): ...

View File

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.dfa.DFAState import DFAState as DFAState
from antlr4.PredictionContext import (
PredictionContext as PredictionContext,
PredictionContextCache as PredictionContextCache,
getCachedPredictionContext as getCachedPredictionContext,
)
class ATNSimulator:
    """Common base of the lexer and parser ATN simulators."""
    ERROR: Incomplete               # shared error DFA state sentinel
    atn: Incomplete
    sharedContextCache: Incomplete  # PredictionContextCache shared across simulators
    def __init__(self, atn: ATN, sharedContextCache: PredictionContextCache) -> None: ...
    def getCachedContext(self, context: PredictionContext): ...

View File

@@ -0,0 +1,99 @@
from _typeshed import Incomplete
from antlr4.atn.Transition import Transition as Transition
INITIAL_NUM_TRANSITIONS: int
# Base class for all ATN states. The int constants are the serialized
# state-type codes, indexed by serializationNames.
class ATNState:
    INVALID_TYPE: int
    BASIC: int
    RULE_START: int
    BLOCK_START: int
    PLUS_BLOCK_START: int
    STAR_BLOCK_START: int
    TOKEN_START: int
    RULE_STOP: int
    BLOCK_END: int
    STAR_LOOP_BACK: int
    STAR_LOOP_ENTRY: int
    PLUS_LOOP_BACK: int
    LOOP_END: int
    serializationNames: Incomplete
    INVALID_STATE_NUMBER: int
    atn: Incomplete
    stateNumber: Incomplete
    stateType: Incomplete
    ruleIndex: int
    epsilonOnlyTransitions: bool
    # Outgoing transitions; addTransition appends (or inserts at index).
    transitions: Incomplete
    nextTokenWithinRule: Incomplete
    def __init__(self) -> None: ...
    def __hash__(self): ...
    def __eq__(self, other): ...
    def onlyHasEpsilonTransitions(self): ...
    def isNonGreedyExitState(self): ...
    def addTransition(self, trans: Transition, index: int = -1): ...
# Concrete ATN state kinds; each subclass fixes its own stateType code.
class BasicState(ATNState):
    stateType: Incomplete
    def __init__(self) -> None: ...
# Base for states at which the simulator makes a prediction decision.
class DecisionState(ATNState):
    decision: int
    nonGreedy: bool
    def __init__(self) -> None: ...
class BlockStartState(DecisionState):
    endState: Incomplete
    def __init__(self) -> None: ...
class BasicBlockStartState(BlockStartState):
    stateType: Incomplete
    def __init__(self) -> None: ...
class BlockEndState(ATNState):
    stateType: Incomplete
    startState: Incomplete
    def __init__(self) -> None: ...
class RuleStopState(ATNState):
    stateType: Incomplete
    def __init__(self) -> None: ...
class RuleStartState(ATNState):
    stateType: Incomplete
    stopState: Incomplete
    isPrecedenceRule: bool
    def __init__(self) -> None: ...
class PlusLoopbackState(DecisionState):
    stateType: Incomplete
    def __init__(self) -> None: ...
class PlusBlockStartState(BlockStartState):
    stateType: Incomplete
    loopBackState: Incomplete
    def __init__(self) -> None: ...
class StarBlockStartState(BlockStartState):
    stateType: Incomplete
    def __init__(self) -> None: ...
class StarLoopbackState(ATNState):
    stateType: Incomplete
    def __init__(self) -> None: ...
class StarLoopEntryState(DecisionState):
    stateType: Incomplete
    loopBackState: Incomplete
    isPrecedenceDecision: Incomplete
    def __init__(self) -> None: ...
class LoopEndState(ATNState):
    stateType: Incomplete
    loopBackState: Incomplete
    def __init__(self) -> None: ...
class TokensStartState(DecisionState):
    stateType: Incomplete
    def __init__(self) -> None: ...

View File

@@ -0,0 +1,7 @@
from enum import IntEnum
class ATNType(IntEnum):
    # Kind of recognizer a (de)serialized ATN targets. Annotation-only
    # attributes are not Enum members per the typing spec, so assign the
    # runtime's actual values (0 = lexer grammar, 1 = parser grammar).
    LEXER = 0
    PARSER = 1
    @classmethod
    def fromOrdinal(cls, i: int): ...

View File

@@ -0,0 +1,87 @@
from _typeshed import Incomplete
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import LexerATNConfig as LexerATNConfig
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet, OrderedATNConfigSet as OrderedATNConfigSet
from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator
from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState
from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor
from antlr4.atn.Transition import Transition as Transition
from antlr4.dfa.DFA import DFA
from antlr4.dfa.DFAState import DFAState as DFAState
from antlr4.error.Errors import (
LexerNoViableAltException as LexerNoViableAltException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.InputStream import InputStream as InputStream
from antlr4.PredictionContext import (
PredictionContext as PredictionContext,
PredictionContextCache as PredictionContextCache,
SingletonPredictionContext as SingletonPredictionContext,
)
from antlr4.Token import Token as Token
# Records the most recent accepting position seen while the lexer
# simulator scans ahead, so it can roll back on failure.
class SimState:
    def __init__(self) -> None: ...
    index: int
    line: int
    column: int
    dfaState: Incomplete
    def reset(self) -> None: ...
# ATN-based lexer interpreter: matches tokens by simulating the ATN and
# caching results in per-mode DFAs.
class LexerATNSimulator(ATNSimulator):
    debug: bool
    dfa_debug: bool
    # Token-value range cached as outgoing DFA edges.
    MIN_DFA_EDGE: int
    MAX_DFA_EDGE: int
    ERROR: Incomplete
    decisionToDFA: Incomplete
    recog: Incomplete
    # Current scan position (character index / line / column).
    startIndex: int
    line: int
    column: int
    mode: Incomplete
    DEFAULT_MODE: Incomplete
    MAX_CHAR_VALUE: Incomplete
    prevAccept: Incomplete
    def __init__(self, recog, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ...
    def copyState(self, simulator: LexerATNSimulator): ...
    def match(self, input: InputStream, mode: int): ...
    def reset(self) -> None: ...
    def matchATN(self, input: InputStream): ...
    def execATN(self, input: InputStream, ds0: DFAState): ...
    def getExistingTargetState(self, s: DFAState, t: int): ...
    def computeTargetState(self, input: InputStream, s: DFAState, t: int): ...
    def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int): ...
    def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int): ...
    def accept(
        self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int
    ): ...
    def getReachableTarget(self, trans: Transition, t: int): ...
    def computeStartState(self, input: InputStream, p: ATNState): ...
    def closure(
        self,
        input: InputStream,
        config: LexerATNConfig,
        configs: ATNConfigSet,
        currentAltReachedAcceptState: bool,
        speculative: bool,
        treatEofAsEpsilon: bool,
    ): ...
    def getEpsilonTarget(
        self,
        input: InputStream,
        config: LexerATNConfig,
        t: Transition,
        configs: ATNConfigSet,
        speculative: bool,
        treatEofAsEpsilon: bool,
    ): ...
    def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool): ...
    def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState): ...
    def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState | None = None, cfgs: ATNConfigSet | None = None) -> DFAState: ...
    def addDFAState(self, configs: ATNConfigSet) -> DFAState: ...
    def getDFA(self, mode: int): ...
    def getText(self, input: InputStream): ...
    def consume(self, input: InputStream): ...
    def getTokenName(self, t: int): ...

View File

@@ -0,0 +1,82 @@
from _typeshed import Incomplete
from enum import IntEnum
Lexer: Incomplete
class LexerActionType(IntEnum):
    # Serialized lexer-action type codes. Annotation-only attributes are
    # not Enum members per the typing spec, so assign the runtime's values.
    CHANNEL = 0
    CUSTOM = 1
    MODE = 2
    MORE = 3
    POP_MODE = 4
    PUSH_MODE = 5
    SKIP = 6
    TYPE = 7
# Base class for actions a lexer rule executes on match.
class LexerAction:
    actionType: Incomplete
    # True when the action depends on the exact input position.
    isPositionDependent: bool
    def __init__(self, action: LexerActionType) -> None: ...
    def __hash__(self): ...
    def __eq__(self, other): ...
# Concrete lexer actions, one per LexerActionType code; stateless ones
# expose a shared INSTANCE singleton.
class LexerSkipAction(LexerAction):
    INSTANCE: Incomplete
    def __init__(self) -> None: ...
    def execute(self, lexer: Lexer): ...
class LexerTypeAction(LexerAction):
    type: Incomplete
    def __init__(self, type: int) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
class LexerPushModeAction(LexerAction):
    mode: Incomplete
    def __init__(self, mode: int) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
class LexerPopModeAction(LexerAction):
    INSTANCE: Incomplete
    def __init__(self) -> None: ...
    def execute(self, lexer: Lexer): ...
class LexerMoreAction(LexerAction):
    INSTANCE: Incomplete
    def __init__(self) -> None: ...
    def execute(self, lexer: Lexer): ...
class LexerModeAction(LexerAction):
    mode: Incomplete
    def __init__(self, mode: int) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
class LexerCustomAction(LexerAction):
    ruleIndex: Incomplete
    actionIndex: Incomplete
    isPositionDependent: bool
    def __init__(self, ruleIndex: int, actionIndex: int) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
class LexerChannelAction(LexerAction):
    channel: Incomplete
    def __init__(self, channel: int) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
# Wraps another action together with an input offset.
class LexerIndexedCustomAction(LexerAction):
    offset: Incomplete
    action: Incomplete
    isPositionDependent: bool
    def __init__(self, offset: int, action: LexerAction) -> None: ...
    def execute(self, lexer: Lexer): ...
    def __hash__(self): ...
    def __eq__(self, other): ...

View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
from antlr4.atn.LexerAction import LexerAction as LexerAction, LexerIndexedCustomAction as LexerIndexedCustomAction
from antlr4.InputStream import InputStream as InputStream
# Executes an ordered sequence of lexer actions for an accepted token.
class LexerActionExecutor:
    lexerActions: Incomplete
    hashCode: Incomplete
    def __init__(self, lexerActions: list[LexerAction] = []) -> None: ...
    # Returns a new executor extended with one more action.
    @staticmethod
    def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ...
    def fixOffsetBeforeMatch(self, offset: int): ...
    def execute(self, lexer, input: InputStream, startIndex: int): ...
    def __hash__(self): ...
    def __eq__(self, other): ...

View File

@@ -0,0 +1,133 @@
from _typeshed import Incomplete
from antlr4 import DFA as DFA
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import ATNConfig as ATNConfig
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator
from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState, RuleStopState as RuleStopState
from antlr4.atn.PredictionMode import PredictionMode as PredictionMode
from antlr4.atn.SemanticContext import SemanticContext as SemanticContext, andContext as andContext, orContext as orContext
from antlr4.atn.Transition import (
ActionTransition as ActionTransition,
AtomTransition as AtomTransition,
NotSetTransition as NotSetTransition,
PrecedencePredicateTransition as PrecedencePredicateTransition,
PredicateTransition as PredicateTransition,
RuleTransition as RuleTransition,
SetTransition as SetTransition,
Transition as Transition,
)
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.dfa.DFAState import DFAState as DFAState, PredPrediction as PredPrediction
from antlr4.error.Errors import NoViableAltException as NoViableAltException
from antlr4.Parser import Parser as Parser
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.PredictionContext import (
PredictionContext as PredictionContext,
PredictionContextCache as PredictionContextCache,
PredictionContextFromRuleContext as PredictionContextFromRuleContext,
SingletonPredictionContext as SingletonPredictionContext,
)
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.Utils import str_list as str_list
# ATN-based parser prediction engine: performs SLL/LL adaptive prediction
# and caches results in per-decision DFAs.
class ParserATNSimulator(ATNSimulator):
    debug: bool
    trace_atn_sim: bool
    dfa_debug: bool
    retry_debug: bool
    parser: Incomplete
    decisionToDFA: Incomplete
    predictionMode: Incomplete
    mergeCache: Incomplete
    def __init__(
        self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache
    ) -> None: ...
    def reset(self) -> None: ...
    # Main entry point: predict which alternative to take at a decision.
    def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ...
    def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ...
    def getExistingTargetState(self, previousD: DFAState, t: int): ...
    def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ...
    def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ...
    def execATNWithFullContext(
        self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext
    ): ...
    def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool): ...
    def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool): ...
    def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ...
    def applyPrecedenceFilter(self, configs: ATNConfigSet): ...
    def getReachableTarget(self, trans: Transition, ttype: int): ...
    def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int): ...
    def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]): ...
    def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ...
    def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ...
    def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ...
    def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool): ...
    # Epsilon-closure helpers used during reach-set construction.
    def closure(
        self,
        config: ATNConfig,
        configs: ATNConfigSet,
        closureBusy: set[Incomplete],
        collectPredicates: bool,
        fullCtx: bool,
        treatEofAsEpsilon: bool,
    ): ...
    def closureCheckingStopState(
        self,
        config: ATNConfig,
        configs: ATNConfigSet,
        closureBusy: set[Incomplete],
        collectPredicates: bool,
        fullCtx: bool,
        depth: int,
        treatEofAsEpsilon: bool,
    ): ...
    def closure_(
        self,
        config: ATNConfig,
        configs: ATNConfigSet,
        closureBusy: set[Incomplete],
        collectPredicates: bool,
        fullCtx: bool,
        depth: int,
        treatEofAsEpsilon: bool,
    ): ...
    def canDropLoopEntryEdgeInLeftRecursiveRule(self, config): ...
    def getRuleName(self, index: int): ...
    epsilonTargetMethods: Incomplete
    def getEpsilonTarget(
        self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool
    ): ...
    def actionTransition(self, config: ATNConfig, t: ActionTransition): ...
    def precedenceTransition(
        self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool
    ): ...
    def predTransition(
        self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool
    ): ...
    def ruleTransition(self, config: ATNConfig, t: RuleTransition): ...
    def getConflictingAlts(self, configs: ATNConfigSet): ...
    def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet): ...
    def getTokenName(self, t: int): ...
    def getLookaheadName(self, input: TokenStream): ...
    def dumpDeadEndConfigs(self, nvae: NoViableAltException): ...
    def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int): ...
    def getUniqueAlt(self, configs: ATNConfigSet): ...
    def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ...
    def addDFAState(self, dfa: DFA, D: DFAState): ...
    # Ambiguity / context-sensitivity reporting hooks.
    def reportAttemptingFullContext(
        self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int
    ): ...
    def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ...
    def reportAmbiguity(
        self,
        dfa: DFA,
        D: DFAState,
        startIndex: int,
        stopIndex: int,
        exact: bool,
        ambigAlts: set[Incomplete],
        configs: ATNConfigSet,
    ): ...

View File

@@ -0,0 +1,41 @@
from collections.abc import Sequence
from enum import Enum
from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import ATNConfig as ATNConfig
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.atn.ATNState import RuleStopState as RuleStopState
from antlr4.atn.SemanticContext import SemanticContext as SemanticContext
class PredictionMode(Enum):
    # Prediction strategies for ParserATNSimulator. Annotation-only
    # attributes are not Enum members per the typing spec, so assign the
    # runtime's actual values.
    SLL = 0
    LL = 1
    LL_EXACT_AMBIG_DETECTION = 2
    # Static conflict-analysis helpers over alt-subset collections.
    @classmethod
    def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet): ...
    @classmethod
    def hasConfigInRuleStopState(cls, configs: ATNConfigSet): ...
    @classmethod
    def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ...
    @classmethod
    def resolvesToJustOneViableAlt(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def allSubsetsConflict(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def hasNonConflictingAltSet(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def hasConflictingAltSet(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def allSubsetsEqual(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def getUniqueAlt(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def getAlts(cls, altsets: Sequence[set[int]]): ...
    @classmethod
    def getConflictingAltSubsets(cls, configs: ATNConfigSet): ...
    @classmethod
    def getStateToAltMap(cls, configs: ATNConfigSet): ...
    @classmethod
    def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ...
    @classmethod
    def getSingleViableAlt(cls, altsets: Sequence[set[int]]): ...

View File

@@ -0,0 +1,49 @@
from _typeshed import Incomplete
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.RuleContext import RuleContext as RuleContext
# Base class for semantic predicates gating ATN transitions.
class SemanticContext:
    # Shared "always true" context; exact value is runtime-defined.
    NONE: Incomplete
    def eval(self, parser: Recognizer, outerContext: RuleContext): ...
    def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ...
# Module-level combinators producing conjunction/disjunction contexts.
def andContext(a: SemanticContext, b: SemanticContext): ...
def orContext(a: SemanticContext, b: SemanticContext): ...
def filterPrecedencePredicates(collection: set[SemanticContext]): ...
# Concrete semantic-context node kinds.
class EmptySemanticContext(SemanticContext): ...
class Predicate(SemanticContext):
    ruleIndex: Incomplete
    predIndex: Incomplete
    isCtxDependent: Incomplete
    def __init__(self, ruleIndex: int = -1, predIndex: int = -1, isCtxDependent: bool = False) -> None: ...
    def eval(self, parser: Recognizer, outerContext: RuleContext): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
class PrecedencePredicate(SemanticContext):
    precedence: Incomplete
    def __init__(self, precedence: int = 0) -> None: ...
    def eval(self, parser: Recognizer, outerContext: RuleContext): ...
    def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ...
    def __lt__(self, other): ...
    def __hash__(self): ...
    def __eq__(self, other): ...
# Conjunction of operand contexts.
class AND(SemanticContext):
    opnds: Incomplete
    def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ...
    def __eq__(self, other): ...
    def __hash__(self): ...
    def eval(self, parser: Recognizer, outerContext: RuleContext): ...
    def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ...
# Disjunction of operand contexts.
class OR(SemanticContext):
    opnds: Incomplete
    def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ...
    def __eq__(self, other): ...
    def __hash__(self): ...
    def eval(self, parser: Recognizer, outerContext: RuleContext): ...
    def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ...

View File

@@ -0,0 +1,101 @@
from _typeshed import Incomplete
from antlr4.atn.ATNState import RuleStartState
from antlr4.IntervalSet import IntervalSet
# Base class for ATN edges. The int constants are the serialized
# transition-type codes, indexed by serializationNames.
class Transition:
    EPSILON: int
    RANGE: int
    RULE: int
    PREDICATE: int
    ATOM: int
    ACTION: int
    SET: int
    NOT_SET: int
    WILDCARD: int
    PRECEDENCE: int
    serializationNames: Incomplete
    serializationTypes: Incomplete
    target: Incomplete
    # True for transitions taken without consuming input.
    isEpsilon: bool
    label: Incomplete
    def __init__(self, target) -> None: ...
# Concrete transition kinds; each sets its own serializationType and
# implements matches() over a symbol range.
class AtomTransition(Transition):
    label_: Incomplete
    label: Incomplete
    serializationType: Incomplete
    def __init__(self, target, label: int) -> None: ...
    def makeLabel(self): ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class RuleTransition(Transition):
    ruleIndex: Incomplete
    precedence: Incomplete
    followState: Incomplete
    serializationType: Incomplete
    isEpsilon: bool
    def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class EpsilonTransition(Transition):
    serializationType: Incomplete
    isEpsilon: bool
    outermostPrecedenceReturn: Incomplete
    def __init__(self, target, outermostPrecedenceReturn: int = -1) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class RangeTransition(Transition):
    serializationType: Incomplete
    start: Incomplete
    stop: Incomplete
    label: Incomplete
    def __init__(self, target, start: int, stop: int) -> None: ...
    def makeLabel(self): ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
# Base for predicate-guarded transitions.
class AbstractPredicateTransition(Transition):
    def __init__(self, target) -> None: ...
class PredicateTransition(AbstractPredicateTransition):
    serializationType: Incomplete
    ruleIndex: Incomplete
    predIndex: Incomplete
    isCtxDependent: Incomplete
    isEpsilon: bool
    def __init__(self, target, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
    def getPredicate(self): ...
class ActionTransition(Transition):
    serializationType: Incomplete
    ruleIndex: Incomplete
    actionIndex: Incomplete
    isCtxDependent: Incomplete
    isEpsilon: bool
    def __init__(self, target, ruleIndex: int, actionIndex: int = -1, isCtxDependent: bool = False) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class SetTransition(Transition):
    serializationType: Incomplete
    label: Incomplete
    def __init__(self, target, set: IntervalSet) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class NotSetTransition(SetTransition):
    serializationType: Incomplete
    def __init__(self, target, set: IntervalSet) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class WildcardTransition(Transition):
    serializationType: Incomplete
    def __init__(self, target) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
class PrecedencePredicateTransition(AbstractPredicateTransition):
    serializationType: Incomplete
    precedence: Incomplete
    isEpsilon: bool
    def __init__(self, target, precedence: int) -> None: ...
    def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ...
    def getPredicate(self): ...

View File

@@ -0,0 +1,21 @@
from _typeshed import Incomplete
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.atn.ATNState import DecisionState as DecisionState, StarLoopEntryState as StarLoopEntryState
from antlr4.dfa.DFAState import DFAState as DFAState
from antlr4.error.Errors import IllegalStateException as IllegalStateException
# A DFA built lazily from ATN simulation results for one decision point.
class DFA:
    atnStartState: Incomplete
    decision: Incomplete
    # Start state; replaced by a dispatch state when precedenceDfa is set.
    s0: Incomplete
    precedenceDfa: bool
    def __init__(self, atnStartState: DecisionState, decision: int = 0) -> None: ...
    def getPrecedenceStartState(self, precedence: int): ...
    def setPrecedenceStartState(self, precedence: int, startState: DFAState): ...
    def setPrecedenceDfa(self, precedenceDfa: bool): ...
    @property
    def states(self): ...
    def sortedStates(self): ...
    def toString(self, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None): ...
    def toLexerString(self): ...

View File

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from antlr4 import DFA as DFA
from antlr4.dfa.DFAState import DFAState as DFAState
from antlr4.Utils import str_list as str_list
# Renders a DFA as human-readable text for debugging.
class DFASerializer:
    dfa: Incomplete
    literalNames: list[str] | None
    symbolicNames: list[str] | None
    def __init__(self, dfa: DFA, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> None: ...
    def getEdgeLabel(self, i: int): ...
    def getStateString(self, s: DFAState): ...
# Lexer variant: edge labels are characters rather than token names.
class LexerDFASerializer(DFASerializer):
    def __init__(self) -> None: ...
    def getEdgeLabel(self, i: int): ...

View File

@@ -0,0 +1,23 @@
from _typeshed import Incomplete
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.atn.SemanticContext import SemanticContext as SemanticContext
# Pairs a semantic predicate with the alternative it selects.
class PredPrediction:
    alt: Incomplete
    pred: Incomplete
    def __init__(self, pred: SemanticContext, alt: int) -> None: ...
# One state of a prediction DFA, wrapping the ATN configs it represents.
class DFAState:
    stateNumber: Incomplete
    configs: Incomplete
    edges: Incomplete
    isAcceptState: bool
    prediction: int
    lexerActionExecutor: Incomplete
    requiresFullContext: bool
    predicates: Incomplete
    def __init__(self, stateNumber: int = -1, configs: ATNConfigSet = ...) -> None: ...
    def getAltSet(self): ...
    def __hash__(self): ...
    def __eq__(self, other): ...

View File

@@ -0,0 +1,20 @@
from _typeshed import Incomplete
from antlr4 import DFA as DFA
from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet
from antlr4.error.ErrorListener import ErrorListener as ErrorListener
# Error listener that reports prediction ambiguities and full-context
# fallbacks instead of ignoring them.
class DiagnosticErrorListener(ErrorListener):
    # When True, only exact ambiguities are reported.
    exactOnly: Incomplete
    def __init__(self, exactOnly: bool = True) -> None: ...
    def reportAmbiguity(
        self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet
    ): ...
    def reportAttemptingFullContext(
        self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet
    ): ...
    def reportContextSensitivity(
        self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet
    ): ...
    def getDecisionDescription(self, recognizer, dfa: DFA): ...
    def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet): ...

View File

@@ -0,0 +1,19 @@
from _typeshed import Incomplete
# Base listener interface; default implementations are no-ops.
class ErrorListener:
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ...
    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ...
    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ...
    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ...
# Default listener attached to recognizers; exposes a shared INSTANCE.
class ConsoleErrorListener(ErrorListener):
    INSTANCE: Incomplete
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ...
# Fans each callback out to a collection of delegate listeners.
class ProxyErrorListener(ErrorListener):
    delegates: Incomplete
    def __init__(self, delegates) -> None: ...
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ...
    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ...
    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ...
    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ...

View File

@@ -0,0 +1,56 @@
from _typeshed import Incomplete
from antlr4.atn.ATNState import ATNState as ATNState
from antlr4.error.Errors import (
FailedPredicateException as FailedPredicateException,
InputMismatchException as InputMismatchException,
NoViableAltException as NoViableAltException,
ParseCancellationException as ParseCancellationException,
RecognitionException as RecognitionException,
)
from antlr4.IntervalSet import IntervalSet as IntervalSet
from antlr4.Token import Token as Token
# Interface for how a parser responds to syntax errors.
class ErrorStrategy:
    def reset(self, recognizer): ...
    def recoverInline(self, recognizer): ...
    def recover(self, recognizer, e: RecognitionException): ...
    def sync(self, recognizer): ...
    def inErrorRecoveryMode(self, recognizer): ...
    def reportError(self, recognizer, e: RecognitionException): ...
# Standard strategy: single-token insertion/deletion, then sync-set recovery.
class DefaultErrorStrategy(ErrorStrategy):
    errorRecoveryMode: bool
    lastErrorIndex: int
    lastErrorStates: Incomplete
    nextTokensContext: Incomplete
    nextTokenState: int
    def __init__(self) -> None: ...
    def reset(self, recognizer): ...
    def beginErrorCondition(self, recognizer): ...
    def inErrorRecoveryMode(self, recognizer): ...
    def endErrorCondition(self, recognizer): ...
    def reportMatch(self, recognizer): ...
    def reportError(self, recognizer, e: RecognitionException): ...
    def recover(self, recognizer, e: RecognitionException): ...
    nextTokensState: Incomplete
    def sync(self, recognizer): ...
    def reportNoViableAlternative(self, recognizer, e: NoViableAltException): ...
    def reportInputMismatch(self, recognizer, e: InputMismatchException): ...
    def reportFailedPredicate(self, recognizer, e) -> None: ...
    def reportUnwantedToken(self, recognizer): ...
    def reportMissingToken(self, recognizer): ...
    def recoverInline(self, recognizer): ...
    def singleTokenInsertion(self, recognizer): ...
    def singleTokenDeletion(self, recognizer): ...
    def getMissingSymbol(self, recognizer): ...
    def getExpectedTokens(self, recognizer): ...
    def getTokenErrorDisplay(self, t: Token): ...
    def escapeWSAndQuote(self, s: str): ...
    def getErrorRecoverySet(self, recognizer): ...
    def consumeUntil(self, recognizer, set_: set[int]): ...
# Strategy that aborts parsing on the first error instead of recovering.
class BailErrorStrategy(DefaultErrorStrategy):
    def recover(self, recognizer, e: RecognitionException): ...
    def recoverInline(self, recognizer): ...
    def sync(self, recognizer): ...

View File

@@ -0,0 +1,64 @@
from _typeshed import Incomplete
from antlr4.InputStream import InputStream as InputStream
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.Recognizer import Recognizer as Recognizer
# Exception hierarchy used throughout the runtime.
class UnsupportedOperationException(Exception):
    def __init__(self, msg: str) -> None: ...
class IllegalStateException(Exception):
    def __init__(self, msg: str) -> None: ...
class CancellationException(IllegalStateException):
    def __init__(self, msg: str) -> None: ...
# Root of all recognition errors; carries the recognizer/input/context
# that were active when the error occurred.
class RecognitionException(Exception):
    message: Incomplete
    recognizer: Incomplete
    input: Incomplete
    ctx: Incomplete
    offendingToken: Incomplete
    offendingState: int
    def __init__(
        self,
        message: str | None = None,
        recognizer: Recognizer | None = None,
        input: InputStream | None = None,
        ctx: Incomplete | None = None,
    ) -> None: ...
    def getExpectedTokens(self): ...
class LexerNoViableAltException(RecognitionException):
    startIndex: Incomplete
    deadEndConfigs: Incomplete
    message: str
    def __init__(self, lexer, input: InputStream, startIndex: int, deadEndConfigs) -> None: ...
class NoViableAltException(RecognitionException):
    deadEndConfigs: Incomplete
    startToken: Incomplete
    offendingToken: Incomplete
    def __init__(
        self,
        recognizer: Incomplete,
        input: Incomplete | None = None,
        startToken: Incomplete | None = None,
        offendingToken: Incomplete | None = None,
        deadEndConfigs: Incomplete | None = None,
        ctx: ParserRuleContext | None = None,
    ) -> None: ...
class InputMismatchException(RecognitionException):
    offendingToken: Incomplete
    def __init__(self, recognizer) -> None: ...
class FailedPredicateException(RecognitionException):
    ruleIndex: Incomplete
    predicateIndex: Incomplete
    predicate: Incomplete
    offendingToken: Incomplete
    def __init__(self, recognizer, predicate: str | None = None, message: str | None = None) -> None: ...
    def formatMessage(self, predicate: str, message: str): ...
class ParseCancellationException(CancellationException): ...

View File

@@ -0,0 +1,12 @@
from _typeshed import Incomplete
# Pieces of a split tree-match pattern: tags (<ID>, <expr:label>) and
# plain text.
class Chunk: ...
class TagChunk(Chunk):
    tag: Incomplete
    label: Incomplete
    def __init__(self, tag: str, label: str | None = None) -> None: ...
class TextChunk(Chunk):
    text: Incomplete
    def __init__(self, text: str) -> None: ...

View File

@@ -0,0 +1,16 @@
from _typeshed import Incomplete
from antlr4.tree.ParseTreePattern import ParseTreePattern as ParseTreePattern
from antlr4.tree.Tree import ParseTree as ParseTree
# Result of matching a ParseTreePattern against a parse tree.
class ParseTreeMatch:
    tree: Incomplete
    pattern: Incomplete
    # Maps label names to the subtrees captured under that label.
    labels: Incomplete
    # Non-None when the match failed at this node.
    mismatchedNode: Incomplete
    def __init__(
        self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree
    ) -> None: ...
    def get(self, label: str): ...
    def getAll(self, label: str): ...
    def succeeded(self): ...

View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher
from antlr4.tree.Tree import ParseTree as ParseTree
from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer
# A compiled tree pattern produced by ParseTreePatternMatcher.compile.
class ParseTreePattern:
    matcher: Incomplete
    patternRuleIndex: Incomplete
    pattern: Incomplete
    patternTree: Incomplete
    def __init__(self, matcher: ParseTreePatternMatcher, pattern: str, patternRuleIndex: int, patternTree: ParseTree) -> None: ...
    def match(self, tree: ParseTree): ...
    def matches(self, tree: ParseTree): ...
    def findAll(self, tree: ParseTree, xpath: str): ...

View File

@@ -0,0 +1,44 @@
from _typeshed import Incomplete
from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream
from antlr4.error.Errors import (
ParseCancellationException as ParseCancellationException,
RecognitionException as RecognitionException,
)
from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy
from antlr4.InputStream import InputStream as InputStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.ListTokenSource import ListTokenSource as ListTokenSource
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.Token import Token as Token
from antlr4.tree.Chunk import TagChunk as TagChunk, TextChunk as TextChunk
from antlr4.tree.RuleTagToken import RuleTagToken as RuleTagToken
from antlr4.tree.TokenTagToken import TokenTagToken as TokenTagToken
from antlr4.tree.Tree import ParseTree as ParseTree, RuleNode as RuleNode, TerminalNode as TerminalNode
# Placeholders for types referenced in annotations below; presumably left as
# Incomplete to avoid circular imports with antlr4.Parser — TODO confirm.
Parser: Incomplete
ParseTreePattern: Incomplete
class CannotInvokeStartRule(Exception):
    # Wraps the underlying exception raised while invoking the pattern's start rule.
    def __init__(self, e: Exception) -> None: ...
class StartRuleDoesNotConsumeFullPattern(Exception): ...  # raised when pattern text remains after the start rule finishes
class ParseTreePatternMatcher:
    # Compiles tree patterns (text with <rule>/<TOKEN> tags) and matches them
    # against parse trees.
    lexer: Lexer  # tokenizes the text chunks of a pattern (from __init__)
    parser: Parser  # supplies rule/token names for the tags (from __init__)
    start: str  # tag start delimiter
    stop: str  # tag stop delimiter
    escape: str  # escape prefix for literal delimiter characters
    def __init__(self, lexer: Lexer, parser: Parser) -> None: ...
    def setDelimiters(self, start: str, stop: str, escapeLeft: str) -> None: ...
    def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int) -> bool: ...
    def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern) -> bool: ...
    def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ...  # returns a ParseTreeMatch
    def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern): ...  # returns a ParseTreeMatch
    def compileTreePattern(self, pattern: str, patternRuleIndex: int): ...  # returns a ParseTreePattern
    def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]): ...  # internal recursive matcher
    def map(self, labels, label, tree) -> None: ...  # records tree under label in labels
    def getRuleTagToken(self, tree: ParseTree): ...  # RuleTagToken for a rule-tag node, else None
    def tokenize(self, pattern: str): ...  # pattern text -> token list
    def split(self, pattern: str): ...  # pattern text -> chunks (TagChunk / TextChunk)

View File

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from antlr4.Token import Token as Token
class RuleTagToken(Token):
    # Token that stands in for an entire rule-reference tag (e.g. "<expr>")
    # inside a tree pattern.
    source: Incomplete
    type: int  # the bypass token type passed to __init__
    channel: int  # NOTE(review): presumably the default channel — confirm against runtime
    start: int
    stop: int
    tokenIndex: int
    line: int
    column: int
    label: str | None  # label from a "<label:rule>" tag, if any (from __init__)
    ruleName: str  # rule this token stands in for (from __init__)
    def __init__(self, ruleName: str, bypassTokenType: int, label: str | None = None) -> None: ...
    def getText(self) -> str: ...  # textual form of the tag

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
from antlr4.Token import CommonToken as CommonToken
class TokenTagToken(CommonToken):
    # Token representing a token tag (e.g. "<ID>") inside a tree pattern.
    tokenName: str  # token type name from the tag (from __init__)
    label: str | None  # label from a "<label:TOKEN>" tag, if any (from __init__)
    def __init__(self, tokenName: str, type: int, label: str | None = None) -> None: ...
    def getText(self) -> str: ...  # textual form of the tag

View File

@@ -0,0 +1,51 @@
from _typeshed import Incomplete
from antlr4.Token import Token as Token
INVALID_INTERVAL: Incomplete
# Abstract marker hierarchy for parse-tree node kinds.
class Tree: ...
class SyntaxTree(Tree): ...
class ParseTree(SyntaxTree): ...
class RuleNode(ParseTree): ...
class TerminalNode(ParseTree): ...
class ErrorNode(TerminalNode): ...
class ParseTreeVisitor:
    # Base visitor; generated visitors subclass this and override per-rule methods.
    def visit(self, tree): ...  # visits a single tree node
    def visitChildren(self, node): ...  # visits each child, aggregating results
    def visitTerminal(self, node): ...
    def visitErrorNode(self, node): ...
    def defaultResult(self) -> None: ...  # base result when no children are visited
    def aggregateResult(self, aggregate, nextResult): ...  # combines a child result into the aggregate
    def shouldVisitNextChild(self, node, currentResult) -> bool: ...  # whether to continue visiting children
class ParseTreeListener:
    # Base listener whose callbacks are fired by ParseTreeWalker during a walk.
    def visitTerminal(self, node: TerminalNode) -> None: ...
    def visitErrorNode(self, node: ErrorNode) -> None: ...
    def enterEveryRule(self, ctx) -> None: ...  # fired before a rule node's children
    def exitEveryRule(self, ctx) -> None: ...  # fired after a rule node's children
class TerminalNodeImpl(TerminalNode):
    # Concrete leaf node wrapping a single Token.
    parentCtx: Incomplete  # enclosing context; presumably None for a detached node — TODO confirm
    symbol: Token  # the wrapped token (from __init__)
    def __init__(self, symbol: Token) -> None: ...
    def __setattr__(self, key, value) -> None: ...
    def getChild(self, i: int): ...  # terminals have no children
    def getSymbol(self) -> Token: ...
    def getParent(self): ...  # returns parentCtx
    def getPayload(self) -> Token: ...
    def getSourceInterval(self): ...  # token-index interval; INVALID_INTERVAL when no symbol
    def getChildCount(self) -> int: ...
    def accept(self, visitor: ParseTreeVisitor): ...  # dispatches to visitor.visitTerminal
    def getText(self) -> str: ...
class ErrorNodeImpl(TerminalNodeImpl, ErrorNode):
    # Leaf node for a token produced during error recovery.
    def __init__(self, token: Token) -> None: ...
    def accept(self, visitor: ParseTreeVisitor): ...  # dispatches to visitor.visitErrorNode
class ParseTreeWalker:
    # Depth-first walker that fires ParseTreeListener callbacks over a tree.
    DEFAULT: ParseTreeWalker  # shared default walker instance
    def walk(self, listener: ParseTreeListener, t: ParseTree) -> None: ...
    def enterRule(self, listener: ParseTreeListener, r: RuleNode) -> None: ...  # fires enterEveryRule and the rule-specific enter
    def exitRule(self, listener: ParseTreeListener, r: RuleNode) -> None: ...  # fires the rule-specific exit and exitEveryRule

View File

@@ -0,0 +1,31 @@
from _typeshed import Incomplete
from antlr4.Token import Token as Token
from antlr4.tree.Tree import (
ErrorNode as ErrorNode,
ParseTree as ParseTree,
RuleNode as RuleNode,
TerminalNode as TerminalNode,
Tree as Tree,
)
from antlr4.Utils import escapeWhitespace as escapeWhitespace
Parser: Incomplete
class Trees:
    # Utility routines for printing and searching parse trees.
    @classmethod
    def toStringTree(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> str: ...  # LISP-style tree string
    @classmethod
    def getNodeText(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> str: ...
    @classmethod
    def getChildren(cls, t: Tree): ...  # list of t's children
    @classmethod
    def getAncestors(cls, t: Tree): ...  # parents of t, root first; empty when t has no parent
    @classmethod
    def findAllTokenNodes(cls, t: ParseTree, ttype: int): ...  # all terminal nodes of token type ttype
    @classmethod
    def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int): ...  # all rule nodes of the given rule index
    @classmethod
    def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool): ...  # common worker for the two finders above
    @classmethod
    def descendants(cls, t: ParseTree): ...  # t plus every node below it

View File

@@ -0,0 +1,67 @@
from _typeshed import Incomplete
from antlr4 import (
DFA as DFA,
CommonTokenStream as CommonTokenStream,
Lexer as Lexer,
LexerATNSimulator as LexerATNSimulator,
ParserRuleContext as ParserRuleContext,
PredictionContextCache as PredictionContextCache,
TerminalNode as TerminalNode,
)
from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
from antlr4.error.ErrorListener import ErrorListener as ErrorListener
from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException
from antlr4.InputStream import InputStream as InputStream
from antlr4.Parser import Parser as Parser
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.tree.Tree import ParseTree as ParseTree
from antlr4.tree.Trees import Trees as Trees
from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer
class XPath:
    # Evaluates XPath-like path expressions (e.g. "//ID", "/prog/func") over a parse tree.
    WILDCARD: str  # path element matching any node
    NOT: str  # prefix inverting a path element
    parser: Parser  # parser supplying rule/token names for path elements (from __init__)
    path: str  # the path expression text (from __init__)
    elements: list[XPathElement]  # compiled path steps
    def __init__(self, parser: Parser, path: str) -> None: ...
    def split(self, path: str): ...  # compiles path text into XPathElement steps
    def getXPathElement(self, wordToken: Token, anywhere: bool) -> XPathElement: ...  # builds the step for one path word
    @staticmethod
    def findAll(tree: ParseTree, xpath: str, parser: Parser): ...  # all nodes matched by xpath
    def evaluate(self, t: ParseTree): ...  # applies each step in turn starting at t
class XPathElement:
    # One step of a compiled path; subclasses implement evaluate().
    nodeName: str  # rule or token name this step refers to (from __init__)
    invert: bool  # True when the step was negated
    def __init__(self, nodeName: str) -> None: ...
class XPathRuleAnywhereElement(XPathElement):
    # "//rule" step: rule nodes of ruleIndex anywhere below the current node.
    ruleIndex: int  # (from __init__)
    def __init__(self, ruleName: str, ruleIndex: int) -> None: ...
    def evaluate(self, t: ParseTree): ...
class XPathRuleElement(XPathElement):
    # "/rule" step: direct children that are rule nodes of ruleIndex (honoring invert).
    ruleIndex: int  # (from __init__)
    def __init__(self, ruleName: str, ruleIndex: int) -> None: ...
    def evaluate(self, t: ParseTree): ...
class XPathTokenAnywhereElement(XPathElement):
    # "//TOKEN" step: terminal nodes of tokenType anywhere below the current node.
    tokenType: int  # (from __init__)
    def __init__(self, ruleName: str, tokenType: int) -> None: ...
    def evaluate(self, t: ParseTree): ...
class XPathTokenElement(XPathElement):
    # "/TOKEN" step: direct children that are terminal nodes of tokenType (honoring invert).
    tokenType: int  # (from __init__)
    def __init__(self, ruleName: str, tokenType: int) -> None: ...
    def evaluate(self, t: ParseTree): ...
class XPathWildcardAnywhereElement(XPathElement):
    # "//*" step: every node below the current node (nothing when inverted).
    def __init__(self) -> None: ...
    def evaluate(self, t: ParseTree): ...
class XPathWildcardElement(XPathElement):
    # "/*" step: every direct child of the current node (nothing when inverted).
    def __init__(self) -> None: ...
    def evaluate(self, t: ParseTree): ...

View File

@@ -0,0 +1,28 @@
from _typeshed import Incomplete
from typing import TextIO
from antlr4 import *
def serializedATN(): ...
class XPathLexer(Lexer):
atn: Incomplete
decisionsToDFA: Incomplete
TOKEN_REF: int
RULE_REF: int
ANYWHERE: int
ROOT: int
WILDCARD: int
BANG: int
ID: int
STRING: int
channelNames: Incomplete
modeNames: Incomplete
literalNames: Incomplete
symbolicNames: Incomplete
ruleNames: Incomplete
grammarFileName: str
def __init__(self, input: Incomplete | None = None, output: TextIO = ...) -> None: ...
def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ...
type: Incomplete
def ID_action(self, localctx: RuleContext, actionIndex: int): ...