Commit 9f961547 authored by LiuTaowen Tony

copied from my personal repo

parent f2fbbb05
token literal names:
null
'['
']'
'('
')'
'forall'
'exist'
'~'
'and'
'or'
'->'
'<->'
','
null
' '
token symbolic names:
null
null
null
null
null
null
null
null
null
null
null
null
null
ATOM
WHITESPACE
rule names:
expr
atoms
atom
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 16, 57, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 5, 2, 29, 10, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 7, 2, 43, 10, 2, 12, 2, 14, 2, 46, 11, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 53, 10, 3, 3, 4, 3, 4, 3, 4, 2, 3, 2, 5, 2, 4, 6, 2, 2, 2, 62, 2, 28, 3, 2, 2, 2, 4, 52, 3, 2, 2, 2, 6, 54, 3, 2, 2, 2, 8, 9, 8, 2, 1, 2, 9, 10, 5, 6, 4, 2, 10, 11, 7, 3, 2, 2, 11, 12, 5, 4, 3, 2, 12, 13, 7, 4, 2, 2, 13, 29, 3, 2, 2, 2, 14, 15, 7, 5, 2, 2, 15, 16, 5, 2, 2, 2, 16, 17, 7, 6, 2, 2, 17, 29, 3, 2, 2, 2, 18, 19, 7, 7, 2, 2, 19, 20, 5, 6, 4, 2, 20, 21, 5, 2, 2, 9, 21, 29, 3, 2, 2, 2, 22, 23, 7, 8, 2, 2, 23, 24, 5, 6, 4, 2, 24, 25, 5, 2, 2, 8, 25, 29, 3, 2, 2, 2, 26, 27, 7, 9, 2, 2, 27, 29, 5, 2, 2, 7, 28, 8, 3, 2, 2, 2, 28, 14, 3, 2, 2, 2, 28, 18, 3, 2, 2, 2, 28, 22, 3, 2, 2, 2, 28, 26, 3, 2, 2, 2, 29, 44, 3, 2, 2, 2, 30, 31, 12, 6, 2, 2, 31, 32, 7, 10, 2, 2, 32, 43, 5, 2, 2, 7, 33, 34, 12, 5, 2, 2, 34, 35, 7, 11, 2, 2, 35, 43, 5, 2, 2, 6, 36, 37, 12, 4, 2, 2, 37, 38, 7, 12, 2, 2, 38, 43, 5, 2, 2, 5, 39, 40, 12, 3, 2, 2, 40, 41, 7, 13, 2, 2, 41, 43, 5, 2, 2, 4, 42, 30, 3, 2, 2, 2, 42, 33, 3, 2, 2, 2, 42, 36, 3, 2, 2, 2, 42, 39, 3, 2, 2, 2, 43, 46, 3, 2, 2, 2, 44, 42, 3, 2, 2, 2, 44, 45, 3, 2, 2, 2, 45, 3, 3, 2, 2, 2, 46, 44, 3, 2, 2, 2, 47, 53, 5, 6, 4, 2, 48, 49, 5, 6, 4, 2, 49, 50, 7, 14, 2, 2, 50, 51, 5, 4, 3, 2, 51, 53, 3, 2, 2, 2, 52, 47, 3, 2, 2, 2, 52, 48, 3, 2, 2, 2, 53, 5, 3, 2, 2, 2, 54, 55, 7, 15, 2, 2, 55, 7, 3, 2, 2, 2, 6, 28, 42, 44, 52]
\ No newline at end of file
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
ATOM=13
WHITESPACE=14
'['=1
']'=2
'('=3
')'=4
'forall'=5
'exist'=6
'~'=7
'and'=8
'or'=9
'->'=10
'<->'=11
','=12
' '=14
token literal names:
null
'['
']'
'('
')'
'forall'
'exist'
'~'
'and'
'or'
'->'
'<->'
','
null
' '
token symbolic names:
null
null
null
null
null
null
null
null
null
null
null
null
null
ATOM
WHITESPACE
rule names:
T__0
T__1
T__2
T__3
T__4
T__5
T__6
T__7
T__8
T__9
T__10
T__11
ATOM
WHITESPACE
channel names:
DEFAULT_TOKEN_CHANNEL
HIDDEN
mode names:
DEFAULT_MODE
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 16, 79, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 12, 3, 12, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 6, 14, 72, 10, 14, 13, 14, 14, 14, 73, 3, 15, 3, 15, 3, 15, 3, 15, 2, 2, 16, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 3, 2, 3, 4, 2, 67, 92, 99, 124, 2, 79, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 3, 31, 3, 2, 2, 2, 5, 33, 3, 2, 2, 2, 7, 35, 3, 2, 2, 2, 9, 37, 3, 2, 2, 2, 11, 39, 3, 2, 2, 2, 13, 46, 3, 2, 2, 2, 15, 52, 3, 2, 2, 2, 17, 54, 3, 2, 2, 2, 19, 58, 3, 2, 2, 2, 21, 61, 3, 2, 2, 2, 23, 64, 3, 2, 2, 2, 25, 68, 3, 2, 2, 2, 27, 71, 3, 2, 2, 2, 29, 75, 3, 2, 2, 2, 31, 32, 7, 93, 2, 2, 32, 4, 3, 2, 2, 2, 33, 34, 7, 95, 2, 2, 34, 6, 3, 2, 2, 2, 35, 36, 7, 42, 2, 2, 36, 8, 3, 2, 2, 2, 37, 38, 7, 43, 2, 2, 38, 10, 3, 2, 2, 2, 39, 40, 7, 104, 2, 2, 40, 41, 7, 113, 2, 2, 41, 42, 7, 116, 2, 2, 42, 43, 7, 99, 2, 2, 43, 44, 7, 110, 2, 2, 44, 45, 7, 110, 2, 2, 45, 12, 3, 2, 2, 2, 46, 47, 7, 103, 2, 2, 47, 48, 7, 122, 2, 2, 48, 49, 7, 107, 2, 2, 49, 50, 7, 117, 2, 2, 50, 51, 7, 118, 2, 2, 51, 14, 3, 2, 2, 2, 52, 53, 7, 128, 2, 2, 53, 16, 3, 2, 2, 2, 54, 55, 7, 99, 2, 2, 55, 56, 7, 112, 2, 2, 56, 57, 7, 102, 2, 2, 57, 18, 3, 2, 2, 2, 58, 59, 7, 113, 2, 2, 59, 60, 7, 116, 2, 2, 60, 20, 3, 2, 2, 2, 61, 62, 7, 47, 2, 2, 62, 63, 7, 64, 2, 2, 63, 22, 3, 2, 2, 2, 64, 65, 7, 62, 2, 2, 65, 66, 7, 47, 2, 2, 66, 67, 7, 64, 2, 2, 67, 24, 3, 2, 2, 2, 68, 69, 7, 46, 2, 2, 69, 26, 3, 2, 2, 2, 70, 72, 9, 2, 2, 2, 71, 70, 3, 2, 2, 2, 72, 73, 3, 2, 2, 2, 73, 71, 3, 2, 2, 2, 73, 74, 3, 2, 2, 2, 74, 28, 3, 2, 2, 2, 75, 76, 7, 34, 2, 2, 76, 77, 3, 2, 2, 2, 77, 78, 8, 15, 2, 2, 78, 30, 3, 2, 2, 2, 4, 2, 73, 3, 8, 2, 2]
\ No newline at end of file
// Generated from /Users/tony/Documents/python_projects/prolog_project/LogicExpAst.g4 by ANTLR 4.8
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class LogicExpAstLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, T__1=2, T__2=3, T__3=4, T__4=5, T__5=6, T__6=7, T__7=8, T__8=9,
T__9=10, T__10=11, T__11=12, ATOM=13, WHITESPACE=14;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8",
"T__9", "T__10", "T__11", "ATOM", "WHITESPACE"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'['", "']'", "'('", "')'", "'forall'", "'exist'", "'~'", "'and'",
"'or'", "'->'", "'<->'", "','", null, "' '"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, null, null, null, null, null, null, null, null, null, null,
null, "ATOM", "WHITESPACE"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public LogicExpAstLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "LogicExpAst.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\20O\b\1\4\2\t\2\4"+
"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
"\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3"+
"\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\t\3\t"+
"\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\16\6\16"+
"H\n\16\r\16\16\16I\3\17\3\17\3\17\3\17\2\2\20\3\3\5\4\7\5\t\6\13\7\r\b"+
"\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\3\2\3\4\2C\\c|\2O\2\3\3\2"+
"\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17"+
"\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2"+
"\2\2\2\33\3\2\2\2\2\35\3\2\2\2\3\37\3\2\2\2\5!\3\2\2\2\7#\3\2\2\2\t%\3"+
"\2\2\2\13\'\3\2\2\2\r.\3\2\2\2\17\64\3\2\2\2\21\66\3\2\2\2\23:\3\2\2\2"+
"\25=\3\2\2\2\27@\3\2\2\2\31D\3\2\2\2\33G\3\2\2\2\35K\3\2\2\2\37 \7]\2"+
"\2 \4\3\2\2\2!\"\7_\2\2\"\6\3\2\2\2#$\7*\2\2$\b\3\2\2\2%&\7+\2\2&\n\3"+
"\2\2\2\'(\7h\2\2()\7q\2\2)*\7t\2\2*+\7c\2\2+,\7n\2\2,-\7n\2\2-\f\3\2\2"+
"\2./\7g\2\2/\60\7z\2\2\60\61\7k\2\2\61\62\7u\2\2\62\63\7v\2\2\63\16\3"+
"\2\2\2\64\65\7\u0080\2\2\65\20\3\2\2\2\66\67\7c\2\2\678\7p\2\289\7f\2"+
"\29\22\3\2\2\2:;\7q\2\2;<\7t\2\2<\24\3\2\2\2=>\7/\2\2>?\7@\2\2?\26\3\2"+
"\2\2@A\7>\2\2AB\7/\2\2BC\7@\2\2C\30\3\2\2\2DE\7.\2\2E\32\3\2\2\2FH\t\2"+
"\2\2GF\3\2\2\2HI\3\2\2\2IG\3\2\2\2IJ\3\2\2\2J\34\3\2\2\2KL\7\"\2\2LM\3"+
"\2\2\2MN\b\17\2\2N\36\3\2\2\2\4\2I\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
\ No newline at end of file
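For orientation, here is a minimal sketch of how the generated LogicExpAstLexer above could be driven with the ANTLR 4.8 runtime. The class name LexerDemo and the sample formula are illustrative assumptions, not part of the generated output; the runtime calls (CharStreams.fromString, CommonTokenStream) are standard ANTLR API.
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;
public class LexerDemo {
    public static void main(String[] args) {
        // Hypothetical input; ATOM matches [A-Za-z]+ and the single-space WHITESPACE token is skipped.
        LogicExpAstLexer lexer = new LogicExpAstLexer(CharStreams.fromString("forall x (p[x] -> q[x])"));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        for (Token t : tokens.getTokens()) {
            // Print each token's display name and the text it matched.
            System.out.println(lexer.getVocabulary().getDisplayName(t.getType()) + " : " + t.getText());
        }
    }
}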
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
ATOM=13
WHITESPACE=14
'['=1
']'=2
'('=3
')'=4
'forall'=5
'exist'=6
'~'=7
'and'=8
'or'=9
'->'=10
'<->'=11
','=12
' '=14
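As a companion sketch, this shows how the lexer and the LogicExpAstParser defined below might be wired together to parse a formula and print its tree. ParserDemo and the input string are assumptions for illustration; expr is the grammar's entry rule (RULE_expr = 0) and toStringTree comes from the ANTLR runtime.
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
public class ParserDemo {
    public static void main(String[] args) {
        // Hypothetical formula using the grammar's literals: forall, exist, ~, and, or, ->, <->.
        CommonTokenStream tokens = new CommonTokenStream(
            new LogicExpAstLexer(CharStreams.fromString("forall x (p[x] and q[x] -> r[x])")));
        LogicExpAstParser parser = new LogicExpAstParser(tokens);
        // Invoke the top-level expr rule and render the resulting parse tree in LISP-style form.
        LogicExpAstParser.ExprContext tree = parser.expr();
        System.out.println(tree.toStringTree(parser));
    }
}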
// Generated from /Users/tony/Documents/python_projects/prolog_project/LogicExpAst.g4 by ANTLR 4.8
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class LogicExpAstParser extends Parser {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
T__0=1, T__1=2, T__2=3, T__3=4, T__4=5, T__5=6, T__6=7, T__7=8, T__8=9,
T__9=10, T__10=11, T__11=12, ATOM=13, WHITESPACE=14;
public static final int
RULE_expr = 0, RULE_atoms = 1, RULE_atom = 2;
private static String[] makeRuleNames() {
return new String[] {
"expr", "atoms", "atom"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, "'['", "']'", "'('", "')'", "'forall'", "'exist'", "'~'", "'and'",
"'or'", "'->'", "'<->'", "','", null, "' '"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, null, null, null, null, null, null, null, null, null, null, null,
null, "ATOM", "WHITESPACE"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "LogicExpAst.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
public LogicExpAstParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
public static class ExprContext extends ParserRuleContext {
public ExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expr; }
public ExprContext() { }
public void copyFrom(ExprContext ctx) {
super.copyFrom(ctx);
}
}
public static class AndExprContext extends ExprContext {
public ExprContext exp1;
public ExprContext exp2;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public AndExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class ExistExprContext extends ExprContext {
public AtomContext var;
public ExprContext exp;
public AtomContext atom() {
return getRuleContext(AtomContext.class,0);
}
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public ExistExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class ImpliesExprContext extends ExprContext {
public ExprContext exp1;
public ExprContext exp2;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public ImpliesExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class PredExprContext extends ExprContext {
public AtomContext pre;
public AtomsContext lterms;
public AtomContext atom() {
return getRuleContext(AtomContext.class,0);
}
public AtomsContext atoms() {
return getRuleContext(AtomsContext.class,0);
}
public PredExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class NotExprContext extends ExprContext {
public ExprContext exp;
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public NotExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class ParenExprContext extends ExprContext {
public ExprContext exp;
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public ParenExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class ForallExprContext extends ExprContext {
public AtomContext var;
public ExprContext exp;
public AtomContext atom() {
return getRuleContext(AtomContext.class,0);
}
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public ForallExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class IffExprContext extends ExprContext {
public ExprContext exp1;
public ExprContext exp2;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public IffExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public static class OrExprContext extends ExprContext {
public ExprContext exp1;
public ExprContext exp2;
public List<ExprContext> expr() {
return getRuleContexts(ExprContext.class);
}
public ExprContext expr(int i) {
return getRuleContext(ExprContext.class,i);
}
public OrExprContext(ExprContext ctx) { copyFrom(ctx); }
}
public final ExprContext expr() throws RecognitionException {
return expr(0);
}
private ExprContext expr(int _p) throws RecognitionException {
ParserRuleContext _parentctx = _ctx;
int _parentState = getState();
ExprContext _localctx = new ExprContext(_ctx, _parentState);
ExprContext _prevctx = _localctx;
int _startState = 0;
enterRecursionRule(_localctx, 0, RULE_expr, _p);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(26);
_errHandler.sync(this);
switch (_input.LA(1)) {
case ATOM:
{
_localctx = new PredExprContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(7);
((PredExprContext)_localctx).pre = atom();
setState(8);
match(T__0);
setState(9);
((PredExprContext)_localctx).lterms = atoms();
setState(10);
match(T__1);
}
break;
case T__2:
{
_localctx = new ParenExprContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(12);
match(T__2);
setState(13);
((ParenExprContext)_localctx).exp = expr(0);
setState(14);
match(T__3);
}
break;
case T__4:
{
_localctx = new ForallExprContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(16);
match(T__4);
setState(17);
((ForallExprContext)_localctx).var = atom();
setState(18);
((ForallExprContext)_localctx).exp = expr(7);
}
break;
case T__5:
{
_localctx = new ExistExprContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(20);
match(T__5);
setState(21);
((ExistExprContext)_localctx).var = atom();
setState(22);
((ExistExprContext)_localctx).exp = expr(6);
}
break;
case T__6:
{
_localctx = new NotExprContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(24);
match(T__6);
setState(25);
((NotExprContext)_localctx).exp = expr(5);
}
break;
default:
throw new NoViableAltException(this);
}
_ctx.stop = _input.LT(-1);
setState(42);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,2,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
setState(40);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) {
case 1:
{
_localctx = new AndExprContext(new ExprContext(_parentctx, _parentState));
((AndExprContext)_localctx).exp1 = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_expr);
setState(28);
if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
setState(29);
match(T__7);
setState(30);
((AndExprContext)_localctx).exp2 = expr(5);
}
break;
case 2:
{
_localctx = new OrExprContext(new ExprContext(_parentctx, _parentState));
((OrExprContext)_localctx).exp1 = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_expr);