Dependencies
Declarations
- org
  - antlr
    - v4
      - runtime
        - ANTLRErrorListener
          - syntaxError(Recognizer<Object, ATNSimulator>, Object, int, int, String, RecognitionException): void
          - reportAmbiguity(Parser, DFA, int, int, boolean, BitSet, ATNConfigSet): void
          - reportAttemptingFullContext(Parser, DFA, int, int, BitSet, ATNConfigSet): void
          - reportContextSensitivity(Parser, DFA, int, int, int, ATNConfigSet): void
        - ANTLRErrorStrategy
        - ANTLRFileStream
        - ANTLRInputStream
          - READ_BUFFER_SIZE: int
          - INITIAL_BUFFER_SIZE: int
          - data: char[]
          - n: int
          - p: int
          - name: String
          - ANTLRInputStream(): void
          - ANTLRInputStream(String): void
          - ANTLRInputStream(char[], int): void
          - ANTLRInputStream(Reader): void
          - ANTLRInputStream(Reader, int): void
          - ANTLRInputStream(Reader, int, int): void
          - ANTLRInputStream(InputStream): void
          - ANTLRInputStream(InputStream, int): void
          - ANTLRInputStream(InputStream, int, int): void
          - load(Reader, int, int): void
          - reset(): void
          - consume(): void
          - LA(int): int
          - LT(int): int
          - index(): int
          - size(): int
          - mark(): int
          - release(int): void
          - seek(int): void
          - getText(Interval): String
          - getSourceName(): String
          - toString(): String
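ANTLRInputStream (and the file-based ANTLRFileStream above it) is the pre-4.7 way to wrap text in a CharStream; newer code usually goes through the CharStreams factory listed further down. A minimal sketch of the stream API itself, using only runtime classes — lookahead is 1-based and end of input is reported as IntStream.EOF:

```java
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.misc.Interval;

public class CharStreamDemo {
    public static void main(String[] args) {
        // Wrap an in-memory string as a CharStream.
        ANTLRInputStream input = new ANTLRInputStream("hello world");

        System.out.println("size  = " + input.size());          // 11
        System.out.println("LA(1) = " + (char) input.LA(1));    // 'h' (1-based lookahead)

        // Consume the first five characters, then peek again.
        for (int i = 0; i < 5; i++) {
            input.consume();
        }
        System.out.println("index = " + input.index());         // 5
        System.out.println("LA(1) = " + (char) input.LA(1));    // ' '

        // Random access: slice the original text and rewind.
        System.out.println(input.getText(Interval.of(0, 4)));   // "hello"
        input.seek(0);
        System.out.println("LA(1) = " + (char) input.LA(1));    // 'h' again

        // Past the end, LA reports IntStream.EOF (-1).
        input.seek(input.size());
        System.out.println(input.LA(1) == IntStream.EOF);       // true
    }
}
```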
        - BailErrorStrategy
        - BaseErrorListener
          - syntaxError(Recognizer<Object, ATNSimulator>, Object, int, int, String, RecognitionException): void
          - reportAmbiguity(Parser, DFA, int, int, boolean, BitSet, ATNConfigSet): void
          - reportAttemptingFullContext(Parser, DFA, int, int, BitSet, ATNConfigSet): void
          - reportContextSensitivity(Parser, DFA, int, int, int, ATNConfigSet): void
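BaseErrorListener supplies empty implementations of all four ANTLRErrorListener callbacks, so a subclass only overrides what it cares about. A minimal sketch that collects messages instead of printing them (the class name CollectingErrorListener is illustrative); attach it to a lexer or parser with removeErrorListeners() followed by addErrorListener(...):

```java
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import java.util.ArrayList;
import java.util.List;

/** Collects syntax errors as strings instead of writing them to stderr. */
public class CollectingErrorListener extends BaseErrorListener {
    private final List<String> errors = new ArrayList<>();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer,
                            Object offendingSymbol,
                            int line,
                            int charPositionInLine,
                            String msg,
                            RecognitionException e) {
        errors.add("line " + line + ":" + charPositionInLine + " " + msg);
    }

    public List<String> getErrors() {
        return errors;
    }
}
```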
        - BufferedTokenStream
          - tokenSource: TokenSource
          - tokens: List<Token>
          - p: int
          - fetchedEOF: boolean
          - BufferedTokenStream(TokenSource): void
          - getTokenSource(): TokenSource
          - index(): int
          - mark(): int
          - release(int): void
          - reset(): void
          - seek(int): void
          - size(): int
          - consume(): void
          - sync(int): boolean
          - fetch(int): int
          - get(int): Token
          - get(int, int): List<Token>
          - LA(int): int
          - LB(int): Token
          - LT(int): Token
          - adjustSeekIndex(int): int
          - lazyInit(): void
          - setup(): void
          - setTokenSource(TokenSource): void
          - getTokens(): List<Token>
          - getTokens(int, int): List<Token>
          - getTokens(int, int, Set<Integer>): List<Token>
          - getTokens(int, int, int): List<Token>
          - nextTokenOnChannel(int, int): int
          - previousTokenOnChannel(int, int): int
          - getHiddenTokensToRight(int, int): List<Token>
          - getHiddenTokensToRight(int): List<Token>
          - getHiddenTokensToLeft(int, int): List<Token>
          - getHiddenTokensToLeft(int): List<Token>
          - filterForChannel(int, int, int): List<Token>
          - getSourceName(): String
          - getText(): String
          - getText(Interval): String
          - getText(RuleContext): String
          - getText(Token, Token): String
          - fill(): void
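BufferedTokenStream pulls tokens from a TokenSource on demand and keeps all of them in memory; CommonTokenStream (listed below) extends it to filter by channel. A self-contained sketch that drives it from hand-built CommonTokens through a ListTokenSource — the ID/INT type codes are made up for the example, where a real program would use constants from a generated lexer:

```java
import org.antlr.v4.runtime.BufferedTokenStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.ListTokenSource;
import org.antlr.v4.runtime.Token;
import java.util.Arrays;
import java.util.List;

public class BufferedTokenStreamDemo {
    // Illustrative token types; real code takes these from a generated lexer.
    static final int ID = 1, INT = 2;

    public static void main(String[] args) {
        List<Token> source = Arrays.asList(
                new CommonToken(ID, "x"),
                new CommonToken(INT, "42"),
                new CommonToken(Token.EOF, "<EOF>"));

        BufferedTokenStream tokens = new BufferedTokenStream(new ListTokenSource(source));
        tokens.fill();                                  // pull everything from the source

        System.out.println("size = " + tokens.size());  // 3 (includes the EOF token)
        System.out.println(tokens.get(0).getText());    // "x"
        System.out.println(tokens.LT(1).getText());     // lookahead from the current position
        System.out.println(tokens.LA(2));               // 2 (INT): token type at offset 2
        System.out.println(tokens.getText());           // "x42" (token texts concatenated, EOF excluded)
    }
}
```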
        - CharStream
        - CharStreams
          - DEFAULT_BUFFER_SIZE: int
          - CharStreams(): void
          - fromPath(Path): CharStream
          - fromPath(Path, Charset): CharStream
          - fromFileName(String): CharStream
          - fromFileName(String, Charset): CharStream
          - fromStream(InputStream): CharStream
          - fromStream(InputStream, Charset): CharStream
          - fromStream(InputStream, Charset, long): CharStream
          - fromChannel(ReadableByteChannel): CharStream
          - fromChannel(ReadableByteChannel, Charset): CharStream
          - fromReader(Reader): CodePointCharStream
          - fromReader(Reader, String): CodePointCharStream
          - fromString(String): CodePointCharStream
          - fromString(String, String): CodePointCharStream
          - fromChannel(ReadableByteChannel, int, CodingErrorAction, String): CodePointCharStream
          - fromChannel(ReadableByteChannel, Charset, int, CodingErrorAction, String, long): CodePointCharStream
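CharStreams is the factory introduced in 4.7 for building a CharStream from strings, files, paths, readers, channels, or input streams; unlike ANTLRInputStream it handles code points above U+FFFF. A short sketch — the Hello.g4 file names are hypothetical:

```java
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;

public class CharStreamsDemo {
    public static void main(String[] args) throws IOException {
        // From an in-memory string; the stream is code-point aware.
        CharStream fromString = CharStreams.fromString("grammar 🙂");
        System.out.println(fromString.size());   // counts code points, not UTF-16 units

        // From a file, decoded as UTF-8 by default; an explicit charset can be passed.
        CharStream fromFile = CharStreams.fromFileName("Hello.g4");              // hypothetical path
        CharStream fromPath = CharStreams.fromPath(Paths.get("Hello.g4"),
                                                   StandardCharsets.ISO_8859_1); // hypothetical path
        System.out.println(fromFile.getSourceName());
        System.out.println(fromPath.size());
    }
}
```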
        - CodePointBuffer
          - Type
          - type: Type
          - byteBuffer: ByteBuffer
          - charBuffer: CharBuffer
          - intBuffer: IntBuffer
          - CodePointBuffer(Type, ByteBuffer, CharBuffer, IntBuffer): void
          - withBytes(ByteBuffer): CodePointBuffer
          - withChars(CharBuffer): CodePointBuffer
          - withInts(IntBuffer): CodePointBuffer
          - position(): int
          - position(int): void
          - remaining(): int
          - get(int): int
          - getType(): Type
          - arrayOffset(): int
          - byteArray(): byte[]
          - charArray(): char[]
          - intArray(): int[]
          - builder(int): Builder
          - Builder
        - CodePointCharStream
          - size: int
          - name: String
          - position: int
          - CodePointCharStream(int, int, String): void
          - getInternalStorage(): Object
          - fromBuffer(CodePointBuffer): CodePointCharStream
          - fromBuffer(CodePointBuffer, String): CodePointCharStream
          - consume(): void
          - index(): int
          - size(): int
          - mark(): int
          - release(int): void
          - seek(int): void
          - getSourceName(): String
          - toString(): String
          - CodePoint8BitCharStream
          - CodePoint16BitCharStream
          - CodePoint32BitCharStream
        - CommonToken
          - EMPTY_SOURCE: Pair<TokenSource, CharStream>
          - type: int
          - line: int
          - charPositionInLine: int
          - channel: int
          - source: Pair<TokenSource, CharStream>
          - text: String
          - index: int
          - start: int
          - stop: int
          - CommonToken(int): void
          - CommonToken(Pair<TokenSource, CharStream>, int, int, int, int): void
          - CommonToken(int, String): void
          - CommonToken(Token): void
          - getType(): int
          - setLine(int): void
          - getText(): String
          - setText(String): void
          - getLine(): int
          - getCharPositionInLine(): int
          - setCharPositionInLine(int): void
          - getChannel(): int
          - setChannel(int): void
          - setType(int): void
          - getStartIndex(): int
          - setStartIndex(int): void
          - getStopIndex(): int
          - setStopIndex(int): void
          - getTokenIndex(): int
          - setTokenIndex(int): void
          - getTokenSource(): TokenSource
          - getInputStream(): CharStream
          - toString(): String
          - toString(Recognizer): String
        - CommonTokenFactory
        - CommonTokenStream
        - ConsoleErrorListener
        - DefaultErrorStrategy
          - errorRecoveryMode: boolean
          - lastErrorIndex: int
          - lastErrorStates: IntervalSet
          - nextTokensContext: ParserRuleContext
          - nextTokensState: int
          - reset(Parser): void
          - beginErrorCondition(Parser): void
          - inErrorRecoveryMode(Parser): boolean
          - endErrorCondition(Parser): void
          - reportMatch(Parser): void
          - reportError(Parser, RecognitionException): void
          - recover(Parser, RecognitionException): void
          - sync(Parser): void
          - reportNoViableAlternative(Parser, NoViableAltException): void
          - reportInputMismatch(Parser, InputMismatchException): void
          - reportFailedPredicate(Parser, FailedPredicateException): void
          - reportUnwantedToken(Parser): void
          - reportMissingToken(Parser): void
          - recoverInline(Parser): Token
          - singleTokenInsertion(Parser): boolean
          - singleTokenDeletion(Parser): Token
          - getMissingSymbol(Parser): Token
          - getExpectedTokens(Parser): IntervalSet
          - getTokenErrorDisplay(Token): String
          - getSymbolText(Token): String
          - getSymbolType(Token): int
          - escapeWSAndQuote(String): String
          - getErrorRecoverySet(Parser): IntervalSet
          - consumeUntil(Parser, IntervalSet): void
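DefaultErrorStrategy is what every Parser uses out of the box: it reports errors through the error listeners and attempts single-token insertion or deletion before resynchronizing. BailErrorStrategy (listed earlier) is the drop-in alternative that gives up on the first error. A hedged sketch of swapping strategies; ExprLexer, ExprParser, and the expr start rule are assumed to come from a generated Expr.g4 grammar:

```java
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.misc.ParseCancellationException;

public class BailDemo {
    public static void main(String[] args) {
        // ExprLexer/ExprParser are hypothetical classes generated from an Expr.g4 grammar.
        ExprLexer lexer = new ExprLexer(CharStreams.fromString("1 + + 2"));
        ExprParser parser = new ExprParser(new CommonTokenStream(lexer));

        // Replace the default recovering strategy: bail out on the first syntax error.
        parser.setErrorHandler(new BailErrorStrategy());
        try {
            parser.expr();   // hypothetical start rule
        } catch (ParseCancellationException e) {
            System.err.println("invalid input, no recovery attempted");
        }
    }
}
```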
        - DiagnosticErrorListener
          - exactOnly: boolean
          - DiagnosticErrorListener(): void
          - DiagnosticErrorListener(boolean): void
          - reportAmbiguity(Parser, DFA, int, int, boolean, BitSet, ATNConfigSet): void
          - reportAttemptingFullContext(Parser, DFA, int, int, BitSet, ATNConfigSet): void
          - reportContextSensitivity(Parser, DFA, int, int, int, ATNConfigSet): void
          - getDecisionDescription(Parser, DFA): String
          - getConflictingAlts(BitSet, ATNConfigSet): BitSet
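DiagnosticErrorListener reports the ambiguities and full-context fallbacks that the prediction machinery normally resolves silently, which is mainly useful while debugging a grammar. A sketch of wiring it up together with exact ambiguity detection; ExprLexer, ExprParser, and the expr rule are again hypothetical generated names:

```java
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DiagnosticErrorListener;
import org.antlr.v4.runtime.atn.PredictionMode;

public class AmbiguityDemo {
    public static void main(String[] args) {
        // ExprLexer/ExprParser are hypothetical generated classes.
        ExprLexer lexer = new ExprLexer(CharStreams.fromString("f(x)"));
        ExprParser parser = new ExprParser(new CommonTokenStream(lexer));

        // Report every ambiguity the prediction machinery resolves, not just exact ones.
        parser.addErrorListener(new DiagnosticErrorListener());
        parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);

        parser.expr();   // hypothetical start rule; diagnostics appear on the console
    }
}
```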
        - FailedPredicateException
        - InputMismatchException
        - IntStream
        - InterpreterRuleContext
        - Lexer
          - DEFAULT_MODE: int
          - MORE: int
          - SKIP: int
          - DEFAULT_TOKEN_CHANNEL: int
          - HIDDEN: int
          - MIN_CHAR_VALUE: int
          - MAX_CHAR_VALUE: int
          - _input: CharStream
          - _tokenFactorySourcePair: Pair<TokenSource, CharStream>
          - _factory: TokenFactory<Token>
          - _token: Token
          - _tokenStartCharIndex: int
          - _tokenStartLine: int
          - _tokenStartCharPositionInLine: int
          - _hitEOF: boolean
          - _channel: int
          - _type: int
          - _modeStack: IntegerStack
          - _mode: int
          - _text: String
          - Lexer(): void
          - Lexer(CharStream): void
          - reset(): void
          - nextToken(): Token
          - skip(): void
          - more(): void
          - mode(int): void
          - pushMode(int): void
          - popMode(): int
          - setTokenFactory(TokenFactory<Token>): void
          - getTokenFactory(): TokenFactory<Token>
          - setInputStream(IntStream): void
          - getSourceName(): String
          - getInputStream(): CharStream
          - emit(Token): void
          - emit(): Token
          - emitEOF(): Token
          - getLine(): int
          - getCharPositionInLine(): int
          - setLine(int): void
          - setCharPositionInLine(int): void
          - getCharIndex(): int
          - getText(): String
          - setText(String): void
          - getToken(): Token
          - setToken(Token): void
          - setType(int): void
          - getType(): int
          - setChannel(int): void
          - getChannel(): int
          - getChannelNames(): String[]
          - getModeNames(): String[]
          - getTokenNames(): String[]
          - getAllTokens(): List<Token>
          - recover(LexerNoViableAltException): void
          - notifyListeners(LexerNoViableAltException): void
          - getErrorDisplay(String): String
          - getErrorDisplay(int): String
          - getCharErrorDisplay(int): String
          - recover(RecognitionException): void
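Lexer is the base class of every generated lexer: nextToken() produces one token at a time and getAllTokens() drains the input in one call. A sketch of driving a lexer by hand; HelloLexer is a hypothetical class generated from a Hello.g4 grammar:

```java
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.Token;

public class TokenizeDemo {
    public static void main(String[] args) {
        // HelloLexer is a hypothetical class generated from a Hello.g4 grammar.
        HelloLexer lexer = new HelloLexer(CharStreams.fromString("hello parrt"));

        // Pull tokens one at a time until the lexer emits EOF.
        for (Token t = lexer.nextToken(); t.getType() != Token.EOF; t = lexer.nextToken()) {
            System.out.printf("%-10s line %d:%d '%s'%n",
                    lexer.getVocabulary().getSymbolicName(t.getType()),
                    t.getLine(), t.getCharPositionInLine(), t.getText());
        }

        // Or rewind and collect everything in one call (EOF excluded).
        lexer.reset();
        System.out.println(lexer.getAllTokens().size());
    }
}
```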
        - LexerInterpreter
          - grammarFileName: String
          - atn: ATN
          - tokenNames: String[]
          - ruleNames: String[]
          - channelNames: String[]
          - modeNames: String[]
          - vocabulary: Vocabulary
          - _decisionToDFA: DFA[]
          - _sharedContextCache: PredictionContextCache
          - LexerInterpreter(String, Collection<String>, Collection<String>, Collection<String>, ATN, CharStream): void
          - LexerInterpreter(String, Vocabulary, Collection<String>, Collection<String>, ATN, CharStream): void
          - LexerInterpreter(String, Vocabulary, Collection<String>, Collection<String>, Collection<String>, ATN, CharStream): void
          - getATN(): ATN
          - getGrammarFileName(): String
          - getTokenNames(): String[]
          - getRuleNames(): String[]
          - getChannelNames(): String[]
          - getModeNames(): String[]
          - getVocabulary(): Vocabulary
        - LexerNoViableAltException
        - ListTokenSource
          - tokens: List<Token>
          - sourceName: String
          - i: int
          - eofToken: Token
          - _factory: TokenFactory<Token>
          - ListTokenSource(List<Token>): void
          - ListTokenSource(List<Token>, String): void
          - getCharPositionInLine(): int
          - nextToken(): Token
          - getLine(): int
          - getInputStream(): CharStream
          - getSourceName(): String
          - setTokenFactory(TokenFactory<Token>): void
          - getTokenFactory(): TokenFactory<Token>
        - NoViableAltException
        - Parser
          - TraceListener
          - TrimToSizeListener
          - bypassAltsAtnCache: Map<String, ATN>
          - _errHandler: ANTLRErrorStrategy
          - _input: TokenStream
          - _precedenceStack: IntegerStack
          - class initializer
          - _ctx: ParserRuleContext
          - _buildParseTrees: boolean
          - _tracer: TraceListener
          - _parseListeners: List<ParseTreeListener>
          - _syntaxErrors: int
          - matchedEOF: boolean
          - Parser(TokenStream): void
          - reset(): void
          - match(int): Token
          - matchWildcard(): Token
          - setBuildParseTree(boolean): void
          - getBuildParseTree(): boolean
          - setTrimParseTree(boolean): void
          - getTrimParseTree(): boolean
          - getParseListeners(): List<ParseTreeListener>
          - addParseListener(ParseTreeListener): void
          - removeParseListener(ParseTreeListener): void
          - removeParseListeners(): void
          - triggerEnterRuleEvent(): void
          - triggerExitRuleEvent(): void
          - getNumberOfSyntaxErrors(): int
          - getTokenFactory(): TokenFactory<Token>
          - setTokenFactory(TokenFactory<Token>): void
          - getATNWithBypassAlts(): ATN
          - compileParseTreePattern(String, int): ParseTreePattern
          - compileParseTreePattern(String, int, Lexer): ParseTreePattern
          - getErrorHandler(): ANTLRErrorStrategy
          - setErrorHandler(ANTLRErrorStrategy): void
          - getInputStream(): TokenStream
          - setInputStream(IntStream): void
          - getTokenStream(): TokenStream
          - setTokenStream(TokenStream): void
          - getCurrentToken(): Token
          - notifyErrorListeners(String): void
          - notifyErrorListeners(Token, String, RecognitionException): void
          - consume(): Token
          - createTerminalNode(ParserRuleContext, Token): TerminalNode
          - createErrorNode(ParserRuleContext, Token): ErrorNode
          - addContextToParseTree(): void
          - enterRule(ParserRuleContext, int, int): void
          - exitRule(): void
          - enterOuterAlt(ParserRuleContext, int): void
          - getPrecedence(): int
          - enterRecursionRule(ParserRuleContext, int): void
          - enterRecursionRule(ParserRuleContext, int, int, int): void
          - pushNewRecursionContext(ParserRuleContext, int, int): void
          - unrollRecursionContexts(ParserRuleContext): void
          - getInvokingContext(int): ParserRuleContext
          - getContext(): ParserRuleContext
          - setContext(ParserRuleContext): void
          - precpred(RuleContext, int): boolean
          - inContext(String): boolean
          - isExpectedToken(int): boolean
          - isMatchedEOF(): boolean
          - getExpectedTokens(): IntervalSet
          - getExpectedTokensWithinCurrentRule(): IntervalSet
          - getRuleIndex(String): int
          - getRuleContext(): ParserRuleContext
          - getRuleInvocationStack(): List<String>
          - getRuleInvocationStack(RuleContext): List<String>
          - getDFAStrings(): List<String>
          - dumpDFA(): void
          - getSourceName(): String
          - getParseInfo(): ParseInfo
          - setProfile(boolean): void
          - setTrace(boolean): void
          - isTrace(): boolean
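Parser is the base class of every generated parser and owns the token stream, the error strategy, the listeners, and the parse-tree construction flag. The usual pipeline looks like the sketch below; HelloLexer, HelloParser, and the start rule r are hypothetical generated names:

```java
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

public class ParseDemo {
    public static void main(String[] args) {
        // HelloLexer/HelloParser are hypothetical classes generated from Hello.g4;
        // r is assumed to be the grammar's start rule.
        HelloLexer lexer = new HelloLexer(CharStreams.fromString("hello parrt"));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        HelloParser parser = new HelloParser(tokens);

        parser.setBuildParseTree(true);          // on by default; shown for clarity
        ParseTree tree = parser.r();

        System.out.println(parser.getNumberOfSyntaxErrors());
        System.out.println(tree.toStringTree(parser));   // LISP-style tree dump
    }
}
```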
        - ParserInterpreter
          - grammarFileName: String
          - atn: ATN
          - decisionToDFA: DFA[]
          - sharedContextCache: PredictionContextCache
          - tokenNames: String[]
          - ruleNames: String[]
          - vocabulary: Vocabulary
          - _parentContextStack: Deque<Pair<ParserRuleContext, Integer>>
          - overrideDecision: int
          - overrideDecisionInputIndex: int
          - overrideDecisionAlt: int
          - overrideDecisionReached: boolean
          - overrideDecisionRoot: InterpreterRuleContext
          - rootContext: InterpreterRuleContext
          - ParserInterpreter(String, Collection<String>, Collection<String>, ATN, TokenStream): void
          - ParserInterpreter(String, Vocabulary, Collection<String>, ATN, TokenStream): void
          - reset(): void
          - getATN(): ATN
          - getTokenNames(): String[]
          - getVocabulary(): Vocabulary
          - getRuleNames(): String[]
          - getGrammarFileName(): String
          - parse(int): ParserRuleContext
          - enterRecursionRule(ParserRuleContext, int, int, int): void
          - getATNState(): ATNState
          - visitState(ATNState): void
          - visitDecisionState(DecisionState): int
          - createInterpreterRuleContext(ParserRuleContext, int, int): InterpreterRuleContext
          - visitRuleStopState(ATNState): void
          - addDecisionOverride(int, int, int): void
          - getOverrideDecisionRoot(): InterpreterRuleContext
          - recover(RecognitionException): void
          - recoverInline(): Token
          - getRootContext(): InterpreterRuleContext
        - ParserRuleContext
          - children: List<ParseTree>
          - start: Token
          - stop: Token
          - exception: RecognitionException
          - ParserRuleContext(): void
          - copyFrom(ParserRuleContext): void
          - ParserRuleContext(ParserRuleContext, int): void
          - enterRule(ParseTreeListener): void
          - exitRule(ParseTreeListener): void
          - addAnyChild(ParseTree): ParseTree
          - addChild(RuleContext): RuleContext
          - addChild(TerminalNode): TerminalNode
          - addErrorNode(ErrorNode): ErrorNode
          - addChild(Token): TerminalNode
          - addErrorNode(Token): ErrorNode
          - removeLastChild(): void
          - getParent(): ParserRuleContext
          - getChild(int): ParseTree
          - getChild(Class<ParseTree>, int): ParseTree
          - getToken(int, int): TerminalNode
          - getTokens(int): List<TerminalNode>
          - getRuleContext(Class<ParserRuleContext>, int): ParserRuleContext
          - getRuleContexts(Class<ParserRuleContext>): List<ParserRuleContext>
          - getChildCount(): int
          - getSourceInterval(): Interval
          - getStart(): Token
          - getStop(): Token
          - toInfoString(Parser): String
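ParserRuleContext is the node type generated parsers build: start and stop hold the first and last matched tokens, and children holds the subtree. A small sketch of walking a context generically (ContextUtils is an illustrative helper, not part of the runtime); pass it any rule context returned by a parse:

```java
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;

public final class ContextUtils {
    private ContextUtils() {}

    /**
     * Prints one line per rule context in the subtree rooted at ctx,
     * showing depth, matched token index range, and matched text.
     */
    public static void dump(ParserRuleContext ctx, int depth) {
        int startIdx = ctx.getStart() == null ? -1 : ctx.getStart().getTokenIndex();
        int stopIdx  = ctx.getStop()  == null ? -1 : ctx.getStop().getTokenIndex();
        System.out.printf("%s[%d..%d] %s%n",
                "  ".repeat(depth), startIdx, stopIdx, ctx.getText());

        for (int i = 0; i < ctx.getChildCount(); i++) {
            ParseTree child = ctx.getChild(i);
            if (child instanceof ParserRuleContext) {
                dump((ParserRuleContext) child, depth + 1);
            }
        }
    }
}
```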
        - ProxyErrorListener
          - delegates: Collection<ANTLRErrorListener>
          - ProxyErrorListener(Collection<ANTLRErrorListener>): void
          - syntaxError(Recognizer<Object, ATNSimulator>, Object, int, int, String, RecognitionException): void
          - reportAmbiguity(Parser, DFA, int, int, boolean, BitSet, ATNConfigSet): void
          - reportAttemptingFullContext(Parser, DFA, int, int, BitSet, ATNConfigSet): void
          - reportContextSensitivity(Parser, DFA, int, int, int, ATNConfigSet): void
        - RecognitionException
          - recognizer: Recognizer<Object, ATNSimulator>
          - ctx: RuleContext
          - input: IntStream
          - offendingToken: Token
          - offendingState: int
          - RecognitionException(Recognizer<Object, ATNSimulator>, IntStream, ParserRuleContext): void
          - RecognitionException(String, Recognizer<Object, ATNSimulator>, IntStream, ParserRuleContext): void
          - getOffendingState(): int
          - setOffendingState(int): void
          - getExpectedTokens(): IntervalSet
          - getCtx(): RuleContext
          - getInputStream(): IntStream
          - getOffendingToken(): Token
          - setOffendingToken(Token): void
          - getRecognizer(): Recognizer<Object, ATNSimulator>
        - Recognizer
          - EOF: int
          - tokenTypeMapCache: Map<Vocabulary, Map<String, Integer>>
          - ruleIndexMapCache: Map<String[], Map<String, Integer>>
          - _listeners: List<ANTLRErrorListener>
          - _interp: ATNSimulator
          - _stateNumber: int
          - getTokenNames(): String[]
          - getRuleNames(): String[]
          - getVocabulary(): Vocabulary
          - getTokenTypeMap(): Map<String, Integer>
          - getRuleIndexMap(): Map<String, Integer>
          - getTokenType(String): int
          - getSerializedATN(): String
          - getGrammarFileName(): String
          - getATN(): ATN
          - getInterpreter(): ATNSimulator
          - getParseInfo(): ParseInfo
          - setInterpreter(ATNSimulator): void
          - getErrorHeader(RecognitionException): String
          - getTokenErrorDisplay(Token): String
          - addErrorListener(ANTLRErrorListener): void
          - removeErrorListener(ANTLRErrorListener): void
          - removeErrorListeners(): void
          - getErrorListeners(): List<ANTLRErrorListener>
          - getErrorListenerDispatch(): ANTLRErrorListener
          - sempred(RuleContext, int, int): boolean
          - precpred(RuleContext, int): boolean
          - action(RuleContext, int, int): void
          - getState(): int
          - setState(int): void
          - getInputStream(): IntStream
          - setInputStream(IntStream): void
          - getTokenFactory(): TokenFactory<Token>
          - setTokenFactory(TokenFactory<Token>): void
        - RuleContext
          - EMPTY: ParserRuleContext
          - parent: RuleContext
          - invokingState: int
          - RuleContext(): void
          - RuleContext(RuleContext, int): void
          - depth(): int
          - isEmpty(): boolean
          - getSourceInterval(): Interval
          - getRuleContext(): RuleContext
          - getParent(): RuleContext
          - getPayload(): RuleContext
          - getText(): String
          - getRuleIndex(): int
          - getAltNumber(): int
          - setAltNumber(int): void
          - setParent(RuleContext): void
          - getChild(int): ParseTree
          - getChildCount(): int
          - accept(ParseTreeVisitor<Object>): Object
          - toStringTree(Parser): String
          - toStringTree(List<String>): String
          - toStringTree(): String
          - toString(): String
          - toString(Recognizer<Object, ATNSimulator>): String
          - toString(List<String>): String
          - toString(Recognizer<Object, ATNSimulator>, RuleContext): String
          - toString(List<String>, RuleContext): String
        - RuleContextWithAltNum
        - RuntimeMetaData
        - Token
          - INVALID_TYPE: int
          - EPSILON: int
          - MIN_USER_TOKEN_TYPE: int
          - EOF: int
          - DEFAULT_CHANNEL: int
          - HIDDEN_CHANNEL: int
          - MIN_USER_CHANNEL_VALUE: int
          - getText(): String
          - getType(): int
          - getLine(): int
          - getCharPositionInLine(): int
          - getChannel(): int
          - getTokenIndex(): int
          - getStartIndex(): int
          - getStopIndex(): int
          - getTokenSource(): TokenSource
          - getInputStream(): CharStream
        - TokenFactory
        - TokenSource
        - TokenStream
        - TokenStreamRewriter
          - DEFAULT_PROGRAM_NAME: String
          - PROGRAM_INIT_SIZE: int
          - MIN_TOKEN_INDEX: int
          - RewriteOperation
          - InsertBeforeOp
          - InsertAfterOp
          - ReplaceOp
          - tokens: TokenStream
          - programs: Map<String, List<RewriteOperation>>
          - lastRewriteTokenIndexes: Map<String, Integer>
          - TokenStreamRewriter(TokenStream): void
          - getTokenStream(): TokenStream
          - rollback(int): void
          - rollback(String, int): void
          - deleteProgram(): void
          - deleteProgram(String): void
          - insertAfter(Token, Object): void
          - insertAfter(int, Object): void
          - insertAfter(String, Token, Object): void
          - insertAfter(String, int, Object): void
          - insertBefore(Token, Object): void
          - insertBefore(int, Object): void
          - insertBefore(String, Token, Object): void
          - insertBefore(String, int, Object): void
          - replace(int, Object): void
          - replace(int, int, Object): void
          - replace(Token, Object): void
          - replace(Token, Token, Object): void
          - replace(String, int, int, Object): void
          - replace(String, Token, Token, Object): void
          - delete(int): void
          - delete(int, int): void
          - delete(Token): void
          - delete(Token, Token): void
          - delete(String, int, int): void
          - delete(String, Token, Token): void
          - getLastRewriteTokenIndex(): int
          - getLastRewriteTokenIndex(String): int
          - setLastRewriteTokenIndex(String, int): void
          - getProgram(String): List<RewriteOperation>
          - initializeProgram(String): List<RewriteOperation>
          - getText(): String
          - getText(String): String
          - getText(Interval): String
          - getText(String, Interval): String
          - reduceToSingleOperationPerIndex(List<RewriteOperation>): Map<Integer, RewriteOperation>
          - catOpText(Object, Object): String
          - getKindOfOps(List<RewriteOperation>, Class<RewriteOperation>, int): List<RewriteOperation>
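TokenStreamRewriter records insert/replace/delete instructions against a buffered token stream and only applies them when getText() is called, so the original tokens stay untouched; the String-typed overloads select independent rewrite programs over the same buffer. A self-contained sketch using hand-built tokens (the ID/EQ/INT type codes are made up for the example):

```java
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ListTokenSource;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStreamRewriter;
import java.util.Arrays;
import java.util.List;

public class RewriterDemo {
    // Illustrative token types; real code takes these from a generated lexer.
    static final int ID = 1, EQ = 2, INT = 3;

    public static void main(String[] args) {
        List<Token> source = Arrays.asList(
                new CommonToken(ID, "x"),
                new CommonToken(EQ, "="),
                new CommonToken(INT, "1"),
                new CommonToken(Token.EOF, ""));
        CommonTokenStream tokens = new CommonTokenStream(new ListTokenSource(source));
        tokens.fill();

        TokenStreamRewriter rewriter = new TokenStreamRewriter(tokens);
        rewriter.insertBefore(0, "int ");         // prepend before token index 0
        rewriter.replace(2, "42");                // replace the INT token's text
        rewriter.insertAfter(2, ";");             // append after the last real token

        System.out.println(tokens.getText());     // original text: x=1
        System.out.println(rewriter.getText());   // rewritten view: int x=42;
    }
}
```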
        - UnbufferedCharStream
          - data: int[]
          - n: int
          - p: int
          - numMarkers: int
          - lastChar: int
          - lastCharBufferStart: int
          - currentCharIndex: int
          - input: Reader
          - name: String
          - UnbufferedCharStream(): void
          - UnbufferedCharStream(int): void
          - UnbufferedCharStream(InputStream): void
          - UnbufferedCharStream(Reader): void
          - UnbufferedCharStream(InputStream, int): void
          - UnbufferedCharStream(InputStream, int, Charset): void
          - UnbufferedCharStream(Reader, int): void
          - consume(): void
          - sync(int): void
          - fill(int): int
          - nextChar(): int
          - add(int): void
          - LA(int): int
          - mark(): int
          - release(int): void
          - index(): int
          - seek(int): void
          - size(): int
          - getSourceName(): String
          - getText(Interval): String
          - getBufferStartIndex(): int
        - UnbufferedTokenStream
          - tokenSource: TokenSource
          - tokens: Token[]
          - n: int
          - p: int
          - numMarkers: int
          - lastToken: Token
          - lastTokenBufferStart: Token
          - currentTokenIndex: int
          - UnbufferedTokenStream(TokenSource): void
          - UnbufferedTokenStream(TokenSource, int): void
          - get(int): Token
          - LT(int): Token
          - LA(int): int
          - getTokenSource(): TokenSource
          - getText(): String
          - getText(RuleContext): String
          - getText(Token, Token): String
          - consume(): void
          - sync(int): void
          - fill(int): int
          - add(Token): void
          - mark(): int
          - release(int): void
          - index(): int
          - seek(int): void
          - size(): int
          - getSourceName(): String
          - getText(Interval): String
          - getBufferStartIndex(): int
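UnbufferedCharStream and UnbufferedTokenStream keep only a small sliding window in memory, which makes it possible to process inputs too large to buffer, at the cost of losing random access. A hedged sketch of the usual wiring; HugeLexer, HugeParser, the file rule, and the input file name are hypothetical, and CommonTokenFactory(true) makes tokens copy their text before the character buffer slides past them:

```java
import java.io.FileInputStream;
import java.io.IOException;
import org.antlr.v4.runtime.CommonTokenFactory;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.UnbufferedCharStream;
import org.antlr.v4.runtime.UnbufferedTokenStream;

public class StreamingDemo {
    public static void main(String[] args) throws IOException {
        try (FileInputStream in = new FileInputStream("huge-input.txt")) {   // hypothetical file
            UnbufferedCharStream chars = new UnbufferedCharStream(in);

            // HugeLexer/HugeParser are hypothetical generated classes.
            HugeLexer lexer = new HugeLexer(chars);
            // Copy token text eagerly: the underlying char buffer is discarded as it slides.
            lexer.setTokenFactory(new CommonTokenFactory(true));

            UnbufferedTokenStream<Token> tokens = new UnbufferedTokenStream<>(lexer);
            HugeParser parser = new HugeParser(tokens);
            parser.setBuildParseTree(false);   // keep memory flat; use listeners/actions instead
            parser.file();                     // hypothetical start rule
        }
    }
}
```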
        - Vocabulary
        - VocabularyImpl
          - EMPTY_NAMES: String[]
          - EMPTY_VOCABULARY: VocabularyImpl
          - literalNames: String[]
          - symbolicNames: String[]
          - displayNames: String[]
          - maxTokenType: int
          - VocabularyImpl(String[], String[]): void
          - VocabularyImpl(String[], String[], String[]): void
          - fromTokenNames(String[]): Vocabulary
          - getMaxTokenType(): int
          - getLiteralName(int): String
          - getSymbolicName(int): String
          - getDisplayName(int): String
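Vocabulary maps token types to display names; VocabularyImpl is the array-backed implementation that generated recognizers expose through getVocabulary(). A small sketch with made-up literal and symbolic names:

```java
import org.antlr.v4.runtime.Vocabulary;
import org.antlr.v4.runtime.VocabularyImpl;

public class VocabularyDemo {
    public static void main(String[] args) {
        // Index 0 is reserved; made-up names for token types 1 and 2.
        String[] literalNames  = { null, "'='", null };
        String[] symbolicNames = { null, "EQ",  "ID" };

        Vocabulary vocab = new VocabularyImpl(literalNames, symbolicNames);

        System.out.println(vocab.getMaxTokenType());   // 2
        System.out.println(vocab.getLiteralName(1));   // '='
        System.out.println(vocab.getSymbolicName(2));  // ID
        System.out.println(vocab.getDisplayName(2));   // ID (literal first, then symbolic, then the number)
    }
}
```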
        - WritableToken
        - atn
          - ATN
          - ATNConfig
          - ATNConfigSet
          - ATNDeserializationOptions
          - ATNDeserializer
          - ATNSerializer
          - ATNSimulator
          - ATNState
          - ATNType
          - AbstractPredicateTransition
          - ActionTransition
          - AmbiguityInfo
          - ArrayPredictionContext
          - AtomTransition
          - BasicBlockStartState
          - BasicState
          - BlockEndState
          - BlockStartState
          - CodePointTransitions
          - ContextSensitivityInfo
          - DecisionEventInfo
          - DecisionInfo
          - DecisionState
          - EmptyPredictionContext
          - EpsilonTransition
          - ErrorInfo
          - LL1Analyzer
          - LexerATNConfig
          - LexerATNSimulator
          - LexerAction
          - LexerActionExecutor
          - LexerActionType
          - LexerChannelAction
          - LexerCustomAction
          - LexerIndexedCustomAction
          - LexerModeAction
          - LexerMoreAction
          - LexerPopModeAction
          - LexerPushModeAction
          - LexerSkipAction
          - LexerTypeAction
          - LookaheadEventInfo
          - LoopEndState
          - NotSetTransition
          - OrderedATNConfigSet
          - ParseInfo
          - ParserATNSimulator
          - PlusBlockStartState
          - PlusLoopbackState
          - PrecedencePredicateTransition
          - PredicateEvalInfo
          - PredicateTransition
          - PredictionContext
          - PredictionContextCache
          - PredictionMode
          - ProfilingATNSimulator
          - RangeTransition
          - RuleStartState
          - RuleStopState
          - RuleTransition
          - SemanticContext
          - SetTransition
          - SingletonPredictionContext
          - StarBlockStartState
          - StarLoopEntryState
          - StarLoopbackState
          - TokensStartState
          - Transition
          - WildcardTransition
        - dfa
        - misc
          - AbstractEqualityComparator
          - Array2DHashSet
          - DoubleKeyMap
          - EqualityComparator
          - FlexibleHashMap
          - IntSet
          - IntegerList
          - IntegerStack
          - InterpreterDataReader
          - Interval
          - IntervalSet
          - LogManager
          - MultiMap
          - MurmurHash
          - NotNull
          - ObjectEqualityComparator
          - OrderedHashSet
          - Pair
          - ParseCancellationException
          - Predicate
          - TestRig
          - Triple
          - Utils
        - tree