Dependencies
- antlr/antlr/2.7.7
- java/11
- org.antlr/stringtemplate/3.2.1
Declarations
- org
- antlr
- runtime
- ANTLRFileStream
- ANTLRInputStream
- ANTLRReaderStream
- ANTLRStringStream
- data: char[]
- n: int
- p: int
- line: int
- charPositionInLine: int
- markDepth: int
- markers: List<CharStreamState>
- lastMarker: int
- name: String
- ANTLRStringStream(): void
- ANTLRStringStream(String): void
- ANTLRStringStream(char[], int): void
- reset(): void
- consume(): void
- LA(int): int
- LT(int): int
- index(): int
- size(): int
- mark(): int
- rewind(int): void
- rewind(): void
- release(int): void
- seek(int): void
- substring(int, int): String
- getLine(): int
- getCharPositionInLine(): int
- setLine(int): void
- setCharPositionInLine(int): void
- getSourceName(): String
- toString(): String
- BaseRecognizer
- MEMO_RULE_FAILED: int
- MEMO_RULE_UNKNOWN: int
- INITIAL_FOLLOW_STACK_SIZE: int
- DEFAULT_TOKEN_CHANNEL: int
- HIDDEN: int
- NEXT_TOKEN_RULE_NAME: String
- state: RecognizerSharedState
- BaseRecognizer(): void
- BaseRecognizer(RecognizerSharedState): void
- reset(): void
- match(IntStream, int, BitSet): Object
- matchAny(IntStream): void
- mismatchIsUnwantedToken(IntStream, int): boolean
- mismatchIsMissingToken(IntStream, BitSet): boolean
- reportError(RecognitionException): void
- displayRecognitionError(String[], RecognitionException): void
- getErrorMessage(RecognitionException, String[]): String
- getNumberOfSyntaxErrors(): int
- getErrorHeader(RecognitionException): String
- getTokenErrorDisplay(Token): String
- emitErrorMessage(String): void
- recover(IntStream, RecognitionException): void
- beginResync(): void
- endResync(): void
- computeErrorRecoverySet(): BitSet
- computeContextSensitiveRuleFOLLOW(): BitSet
- combineFollows(boolean): BitSet
- recoverFromMismatchedToken(IntStream, int, BitSet): Object
- recoverFromMismatchedSet(IntStream, RecognitionException, BitSet): Object
- getCurrentInputSymbol(IntStream): Object
- getMissingSymbol(IntStream, RecognitionException, int, BitSet): Object
- consumeUntil(IntStream, int): void
- consumeUntil(IntStream, BitSet): void
- pushFollow(BitSet): void
- getRuleInvocationStack(): List<String>
- getRuleInvocationStack(Throwable, String): List<String>
- getBacktrackingLevel(): int
- setBacktrackingLevel(int): void
- failed(): boolean
- getTokenNames(): String[]
- getGrammarFileName(): String
- getSourceName(): String
- toStrings(List<Token>): List<String>
- getRuleMemoization(int, int): int
- alreadyParsedRule(IntStream, int): boolean
- memoize(IntStream, int, int): void
- getRuleMemoizationCacheSize(): int
- traceIn(String, int, Object): void
- traceOut(String, int, Object): void
- BitSet
- BITS: int
- LOG_BITS: int
- MOD_MASK: int
- bits: long[]
- BitSet(): void
- BitSet(long[]): void
- BitSet(List<Integer>): void
- BitSet(int): void
- of(int): BitSet
- of(int, int): BitSet
- of(int, int, int): BitSet
- of(int, int, int, int): BitSet
- or(BitSet): BitSet
- add(int): void
- growToInclude(int): void
- orInPlace(BitSet): void
- setSize(int): void
- bitMask(int): long
- clone(): Object
- size(): int
- equals(Object): boolean
- member(int): boolean
- remove(int): void
- isNil(): boolean
- numWordsToHold(int): int
- numBits(): int
- lengthInLongWords(): int
- toArray(): int[]
- toPackedArray(): long[]
- wordNumber(int): int
- toString(): String
- toString(String[]): String
- BufferedTokenStream
- tokenSource: TokenSource
- tokens: List<Token>
- lastMarker: int
- p: int
- range: int
- BufferedTokenStream(): void
- BufferedTokenStream(TokenSource): void
- getTokenSource(): TokenSource
- index(): int
- range(): int
- mark(): int
- release(int): void
- rewind(int): void
- rewind(): void
- reset(): void
- seek(int): void
- size(): int
- consume(): void
- sync(int): void
- fetch(int): void
- get(int): Token
- get(int, int): List<Token>
- LA(int): int
- LB(int): Token
- LT(int): Token
- setup(): void
- setTokenSource(TokenSource): void
- getTokens(): List<Token>
- getTokens(int, int): List<Token>
- getTokens(int, int, BitSet): List<Token>
- getTokens(int, int, List<Integer>): List<Token>
- getTokens(int, int, int): List<Token>
- getSourceName(): String
- toString(): String
- toString(int, int): String
- toString(Token, Token): String
- fill(): void
- CharStream
- CharStreamState
- ClassicToken
- text: String
- type: int
- line: int
- charPositionInLine: int
- channel: int
- index: int
- ClassicToken(int): void
- ClassicToken(Token): void
- ClassicToken(int, String): void
- ClassicToken(int, String, int): void
- getType(): int
- setLine(int): void
- getText(): String
- setText(String): void
- getLine(): int
- getCharPositionInLine(): int
- setCharPositionInLine(int): void
- getChannel(): int
- setChannel(int): void
- setType(int): void
- getTokenIndex(): int
- setTokenIndex(int): void
- getInputStream(): CharStream
- setInputStream(CharStream): void
- toString(): String
- CommonToken
- type: int
- line: int
- charPositionInLine: int
- channel: int
- input: CharStream
- text: String
- index: int
- start: int
- stop: int
- CommonToken(int): void
- CommonToken(CharStream, int, int, int, int): void
- CommonToken(int, String): void
- CommonToken(Token): void
- getType(): int
- setLine(int): void
- getText(): String
- setText(String): void
- getLine(): int
- getCharPositionInLine(): int
- setCharPositionInLine(int): void
- getChannel(): int
- setChannel(int): void
- setType(int): void
- getStartIndex(): int
- setStartIndex(int): void
- getStopIndex(): int
- setStopIndex(int): void
- getTokenIndex(): int
- setTokenIndex(int): void
- getInputStream(): CharStream
- setInputStream(CharStream): void
- toString(): String
- CommonTokenStream
- channel: int
- CommonTokenStream(): void
- CommonTokenStream(TokenSource): void
- CommonTokenStream(TokenSource, int): void
- consume(): void
- LB(int): Token
- LT(int): Token
- skipOffTokenChannels(int): int
- skipOffTokenChannelsReverse(int): int
- reset(): void
- setup(): void
- getNumberOfOnChannelTokens(): int
- setTokenSource(TokenSource): void
- DFA
- eot: short[]
- eof: short[]
- min: char[]
- max: char[]
- accept: short[]
- special: short[]
- transition: short[][]
- decisionNumber: int
- recognizer: BaseRecognizer
- debug: boolean
- predict(IntStream): int
- noViableAlt(int, IntStream): void
- error(NoViableAltException): void
- specialStateTransition(int, IntStream): int
- getDescription(): String
- unpackEncodedString(String): short[]
- unpackEncodedStringToUnsignedChars(String): char[]
- EarlyExitException
- FailedPredicateException
- IntStream
- LegacyCommonTokenStream
- tokenSource: TokenSource
- tokens: List<Token>
- channelOverrideMap: Map<Integer, Integer>
- discardSet: Set<Integer>
- channel: int
- discardOffChannelTokens: boolean
- lastMarker: int
- range: int
- p: int
- LegacyCommonTokenStream(): void
- LegacyCommonTokenStream(TokenSource): void
- LegacyCommonTokenStream(TokenSource, int): void
- setTokenSource(TokenSource): void
- fillBuffer(): void
- consume(): void
- skipOffTokenChannels(int): int
- skipOffTokenChannelsReverse(int): int
- setTokenTypeChannel(int, int): void
- discardTokenType(int): void
- discardOffChannelTokens(boolean): void
- getTokens(): List<Token>
- getTokens(int, int): List<Token>
- getTokens(int, int, BitSet): List<Token>
- getTokens(int, int, List<Integer>): List<Token>
- getTokens(int, int, int): List<Token>
- LT(int): Token
- LB(int): Token
- get(int): Token
- get(int, int): List<Token>
- LA(int): int
- mark(): int
- release(int): void
- size(): int
- index(): int
- range(): int
- rewind(int): void
- rewind(): void
- reset(): void
- seek(int): void
- getTokenSource(): TokenSource
- getSourceName(): String
- toString(): String
- toString(int, int): String
- toString(Token, Token): String
- Lexer
- input: CharStream
- Lexer(): void
- Lexer(CharStream): void
- Lexer(CharStream, RecognizerSharedState): void
- reset(): void
- nextToken(): Token
- getEOFToken(): Token
- skip(): void
- mTokens(): void
- setCharStream(CharStream): void
- getCharStream(): CharStream
- getSourceName(): String
- emit(Token): void
- emit(): Token
- match(String): void
- matchAny(): void
- match(int): void
- matchRange(int, int): void
- getLine(): int
- getCharPositionInLine(): int
- getCharIndex(): int
- getText(): String
- setText(String): void
- reportError(RecognitionException): void
- getErrorMessage(RecognitionException, String[]): String
- getCharErrorDisplay(int): String
- recover(RecognitionException): void
- traceIn(String, int): void
- traceOut(String, int): void
- MismatchedNotSetException
- MismatchedRangeException
- MismatchedSetException
- MismatchedTokenException
- MismatchedTreeNodeException
- MissingTokenException
- NoViableAltException
- Parser
- input: TokenStream
- Parser(TokenStream): void
- Parser(TokenStream, RecognizerSharedState): void
- reset(): void
- getCurrentInputSymbol(IntStream): Object
- getMissingSymbol(IntStream, RecognitionException, int, BitSet): Object
- setTokenStream(TokenStream): void
- getTokenStream(): TokenStream
- getSourceName(): String
- traceIn(String, int): void
- traceOut(String, int): void
- ParserRuleReturnScope
- RecognitionException
- RecognizerSharedState
- following: BitSet[]
- _fsp: int
- errorRecovery: boolean
- lastErrorIndex: int
- failed: boolean
- syntaxErrors: int
- backtracking: int
- ruleMemo: Map[]
- token: Token
- tokenStartCharIndex: int
- tokenStartLine: int
- tokenStartCharPositionInLine: int
- channel: int
- type: int
- text: String
- RecognizerSharedState(): void
- RecognizerSharedState(RecognizerSharedState): void
- RuleReturnScope
- SerializedGrammar
- COOKIE: String
- FORMAT_VERSION: int
- name: String
- type: char
- rules: List<Rule>
- Rule
- Node
- Block
- TokenRef
- RuleRef
- SerializedGrammar(String): void
- readFile(DataInputStream): void
- readRules(DataInputStream, int): List<Rule>
- readRule(DataInputStream): Rule
- readBlock(DataInputStream): Block
- readAlt(DataInputStream): List<Node>
- readString(DataInputStream): String
- toString(): String
- Token
- DOWN: int
- UP: int
- EOR_TOKEN_TYPE: int
- MIN_TOKEN_TYPE: int
- EOF: int
- INVALID_TOKEN_TYPE: int
- INVALID_TOKEN: Token
- SKIP_TOKEN: Token
- DEFAULT_CHANNEL: int
- HIDDEN_CHANNEL: int
- getText(): String
- setText(String): void
- getType(): int
- setType(int): void
- getLine(): int
- setLine(int): void
- getCharPositionInLine(): int
- setCharPositionInLine(int): void
- getChannel(): int
- setChannel(int): void
- getTokenIndex(): int
- setTokenIndex(int): void
- getInputStream(): CharStream
- setInputStream(CharStream): void
- TokenRewriteStream
- DEFAULT_PROGRAM_NAME: String
- PROGRAM_INIT_SIZE: int
- MIN_TOKEN_INDEX: int
- RewriteOperation
- InsertBeforeOp
- ReplaceOp
- programs: Map<String, List<RewriteOperation>>
- lastRewriteTokenIndexes: Map<String, Integer>
- TokenRewriteStream(): void
- init(): void
- TokenRewriteStream(TokenSource): void
- TokenRewriteStream(TokenSource, int): void
- rollback(int): void
- rollback(String, int): void
- deleteProgram(): void
- deleteProgram(String): void
- insertAfter(Token, Object): void
- insertAfter(int, Object): void
- insertAfter(String, Token, Object): void
- insertAfter(String, int, Object): void
- insertBefore(Token, Object): void
- insertBefore(int, Object): void
- insertBefore(String, Token, Object): void
- insertBefore(String, int, Object): void
- replace(int, Object): void
- replace(int, int, Object): void
- replace(Token, Object): void
- replace(Token, Token, Object): void
- replace(String, int, int, Object): void
- replace(String, Token, Token, Object): void
- delete(int): void
- delete(int, int): void
- delete(Token): void
- delete(Token, Token): void
- delete(String, int, int): void
- delete(String, Token, Token): void
- getLastRewriteTokenIndex(): int
- getLastRewriteTokenIndex(String): int
- setLastRewriteTokenIndex(String, int): void
- getProgram(String): List<RewriteOperation>
- initializeProgram(String): List<RewriteOperation>
- toOriginalString(): String
- toOriginalString(int, int): String
- toString(): String
- toString(String): String
- toString(int, int): String
- toString(String, int, int): String
- reduceToSingleOperationPerIndex(List<RewriteOperation>): Map<Integer, RewriteOperation>
- catOpText(Object, Object): String
- getKindOfOps(List<RewriteOperation>, Class<RewriteOperation>): List<RewriteOperation>
- getKindOfOps(List<RewriteOperation>, Class<RewriteOperation>, int): List<RewriteOperation>
- toDebugString(): String
- toDebugString(int, int): String
- TokenSource
- TokenStream
- UnbufferedTokenStream
- UnwantedTokenException
- debug
- misc
- tree
- BaseTree
- BaseTreeAdaptor
- BufferedTreeNodeStream
- CommonErrorNode
- CommonTree
- CommonTreeAdaptor
- CommonTreeNodeStream
- DOTTreeGenerator
- ParseTree
- PositionTrackingStream
- RewriteCardinalityException
- RewriteEarlyExitException
- RewriteEmptyStreamException
- RewriteRuleElementStream
- RewriteRuleNodeStream
- RewriteRuleSubtreeStream
- RewriteRuleTokenStream
- Tree
- TreeAdaptor
- TreeFilter
- TreeIterator
- TreeNodeStream
- TreeParser
- TreePatternLexer
- TreePatternParser
- TreeRewriter
- TreeRuleReturnScope
- TreeVisitor
- TreeVisitorAction
- TreeWizard
- runtime
- antlr