Commit ac19a74e authored by Aurélien Lamercerie

ORG and UNL parsing (Concrete syntax only)

parent 116f46a7
Showing with 1829 additions and 3 deletions
*.pyc
*__pycache__*
*.todo
devtemp*.py
.project
*.ttl.tbc
File deleted
File deleted
File deleted
token literal names:
null
null
null
token symbolic names:
null
WS
ORG
rule names:
orgPart
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 4, 7, 4, 2, 9, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 5, 2, 4, 3, 2, 2, 2, 4, 5, 7, 4, 2, 2, 5, 3, 3, 2, 2, 2, 2]
\ No newline at end of file
WS=1
ORG=2
token literal names:
null
null
null
token symbolic names:
null
WS
ORG
rule names:
WS
ORG
channel names:
DEFAULT_TOKEN_CHANNEL
HIDDEN
mode names:
DEFAULT_MODE
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 4, 36, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 3, 2, 6, 2, 9, 10, 2, 13, 2, 14, 2, 10, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 25, 10, 3, 12, 3, 14, 3, 28, 11, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 4, 3, 3, 5, 4, 3, 2, 3, 5, 2, 11, 12, 14, 15, 34, 34, 2, 37, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 3, 8, 3, 2, 2, 2, 5, 14, 3, 2, 2, 2, 7, 9, 9, 2, 2, 2, 8, 7, 3, 2, 2, 2, 9, 10, 3, 2, 2, 2, 10, 8, 3, 2, 2, 2, 10, 11, 3, 2, 2, 2, 11, 12, 3, 2, 2, 2, 12, 13, 8, 2, 2, 2, 13, 4, 3, 2, 2, 2, 14, 15, 7, 125, 2, 2, 15, 16, 7, 113, 2, 2, 16, 17, 7, 116, 2, 2, 17, 18, 7, 105, 2, 2, 18, 19, 7, 60, 2, 2, 19, 20, 7, 103, 2, 2, 20, 21, 7, 112, 2, 2, 21, 22, 7, 127, 2, 2, 22, 26, 3, 2, 2, 2, 23, 25, 11, 2, 2, 2, 24, 23, 3, 2, 2, 2, 25, 28, 3, 2, 2, 2, 26, 24, 3, 2, 2, 2, 26, 27, 3, 2, 2, 2, 27, 29, 3, 2, 2, 2, 28, 26, 3, 2, 2, 2, 29, 30, 7, 125, 2, 2, 30, 31, 7, 49, 2, 2, 31, 32, 7, 113, 2, 2, 32, 33, 7, 116, 2, 2, 33, 34, 7, 105, 2, 2, 34, 35, 7, 127, 2, 2, 35, 6, 3, 2, 2, 2, 5, 2, 10, 26, 3, 8, 2, 2]
\ No newline at end of file
# Generated from grammar/org/org.g4 by ANTLR 4.9.3
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\4")
buf.write("$\b\1\4\2\t\2\4\3\t\3\3\2\6\2\t\n\2\r\2\16\2\n\3\2\3\2")
buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3\31\n\3\f")
buf.write("\3\16\3\34\13\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\2\2\4\3\3")
buf.write("\5\4\3\2\3\5\2\13\f\16\17\"\"\2%\2\3\3\2\2\2\2\5\3\2\2")
buf.write("\2\3\b\3\2\2\2\5\16\3\2\2\2\7\t\t\2\2\2\b\7\3\2\2\2\t")
buf.write("\n\3\2\2\2\n\b\3\2\2\2\n\13\3\2\2\2\13\f\3\2\2\2\f\r\b")
buf.write("\2\2\2\r\4\3\2\2\2\16\17\7}\2\2\17\20\7q\2\2\20\21\7t")
buf.write("\2\2\21\22\7i\2\2\22\23\7<\2\2\23\24\7g\2\2\24\25\7p\2")
buf.write("\2\25\26\7\177\2\2\26\32\3\2\2\2\27\31\13\2\2\2\30\27")
buf.write("\3\2\2\2\31\34\3\2\2\2\32\30\3\2\2\2\32\33\3\2\2\2\33")
buf.write("\35\3\2\2\2\34\32\3\2\2\2\35\36\7}\2\2\36\37\7\61\2\2")
buf.write("\37 \7q\2\2 !\7t\2\2!\"\7i\2\2\"#\7\177\2\2#\6\3\2\2\2")
buf.write("\5\2\n\32\3\b\2\2")
return buf.getvalue()
class orgLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
WS = 1
ORG = 2
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
]
symbolicNames = [ "<INVALID>",
"WS", "ORG" ]
ruleNames = [ "WS", "ORG" ]
grammarFileName = "org.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9.3")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
WS=1
ORG=2
# Generated from grammar/org/org.g4 by ANTLR 4.9.3
from antlr4 import *
if __name__ is not None and "." in __name__:
from .orgParser import orgParser
else:
from orgParser import orgParser
# This class defines a complete listener for a parse tree produced by orgParser.
class orgListener(ParseTreeListener):
# Enter a parse tree produced by orgParser#orgPart.
def enterOrgPart(self, ctx:orgParser.OrgPartContext):
pass
# Exit a parse tree produced by orgParser#orgPart.
def exitOrgPart(self, ctx:orgParser.OrgPartContext):
pass
del orgParser
\ No newline at end of file
# Generated from grammar/org/org.g4 by ANTLR 4.9.3
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\4")
buf.write("\7\4\2\t\2\3\2\3\2\3\2\2\2\3\2\2\2\2\5\2\4\3\2\2\2\4\5")
buf.write("\7\4\2\2\5\3\3\2\2\2\2")
return buf.getvalue()
class orgParser ( Parser ):
grammarFileName = "org.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ ]
symbolicNames = [ "<INVALID>", "WS", "ORG" ]
RULE_orgPart = 0
ruleNames = [ "orgPart" ]
EOF = Token.EOF
WS=1
ORG=2
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9.3")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class OrgPartContext(ParserRuleContext):
__slots__ = 'parser'
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ORG(self):
return self.getToken(orgParser.ORG, 0)
def getRuleIndex(self):
return orgParser.RULE_orgPart
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterOrgPart" ):
listener.enterOrgPart(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitOrgPart" ):
listener.exitOrgPart(self)
def orgPart(self):
localctx = orgParser.OrgPartContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_orgPart)
try:
self.enterOuterAlt(localctx, 1)
self.state = 2
self.match(orgParser.ORG)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
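A minimal usage sketch, not part of the commit: assuming the generated orgLexer and orgParser modules above are on the import path and the antlr4 Python runtime is installed, the org part of an annotated sentence could be parsed like this.
# Hypothetical usage of the generated org parser (illustration only).
from antlr4 import InputStream, CommonTokenStream
from orgLexer import orgLexer
from orgParser import orgParser

text = "{org:en} The system allows a radio channel to take on two states. {/org}"
lexer = orgLexer(InputStream(text))            # the whole block lexes as a single ORG token
parser = orgParser(CommonTokenStream(lexer))
tree = parser.orgPart()                        # start rule: orgPart : ORG ;
print(tree.toStringTree(recog=parser))         # (orgPart {org:en} ... {/org})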
@@ -14,9 +14,56 @@ grammar unl;
//---------------------------------------------------------
unlPart
: '{unl}' (relationOccurrence)+ '{/unl}'    // replaces the previous ': UNL'
;
relationOccurrence
: universalRelation LP universalWord COMMA universalWord RP
;
universalWord
: headword
(LP restriction (COMMA restriction)* RP)?
(attribute)*
| value
;
headword
: ident
;
restriction
: universalRelation GREATER ident
;
attribute
: DOT AT ident
;
value
: VALUE
;
universalRelation
: ( AND | AOJ | BEN | CNT |
EQU | ICL | OBJ | QUA )
;
//---------------------------------------------------------
// Base Element
//---------------------------------------------------------
sentence : (word | punctuation | bracket)* ;
ident : word (UNDERSCORE word)* ;
word : LETTER | WORD ;
punctuation : DOT | COMMA | SEMCOL | COLON | DASH ;
bracket : LP | RP | LC | RC ;
//=============================================================================
// Lexer Grammar
@@ -25,6 +72,43 @@ unlPart
// ignore whitespaces
WS : (' '|'\n'|'\t'|'\r'|'\u000C')+ -> skip ;
// fragments
fragment LOWERCASE : [a-z] ;
fragment UPPERCASE : [A-Z] ;
fragment DIGIT : '0'..'9' ;
fragment ASCII : ~('\n'|'"'|'<'|'>'|'('|')') ;
// punctuation
DOT : '.' ;
COMMA : ',' ;
SEMCOL : ';' ;
COLON : ':' ;
DASH : '-' ;
// brackets
LP : '(' ; // Left parenthesis
RP : ')' ;
LC : '{' ; // Left curly bracket
RC : '}' ;
// symbols
LESS : '<' ;
GREATER : '>' ;
AT : '@' ;
UNDERSCORE : '_' ;
// relations
AND : 'and' ;
AOJ : 'aoj' ;
BEN : 'ben' ;
CNT : 'cnt' ;
EQU : 'equ' ;
ICL : 'icl' ;
OBJ : 'obj' ;
QUA : 'qua' ;
// other tokens
LETTER : LOWERCASE | UPPERCASE ;    // replaces the previous ORG : '{org:en}' (.)* '{/org}' ;
WORD : (LETTER)+ ;                  // replaces the previous UNL : '{unl}' (.)* '{/unl}' ;
VALUE : (DIGIT)+ (DOT (DIGIT)+)? ;
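To illustrate the concrete syntax above, here is a hedged sketch (assuming the generated unlLexer and unlParser modules that appear further down in this commit, plus the antlr4 Python runtime) parsing a single relation occurrence taken from the sample document at the end of the commit:
# Illustration only, not part of the commit.
from antlr4 import InputStream, CommonTokenStream
from unlLexer import unlLexer
from unlParser import unlParser

text = "qua( state(icl>attribute).@plu, 2 )"
parser = unlParser(CommonTokenStream(unlLexer(InputStream(text))))
tree = parser.relationOccurrence()     # universalRelation LP universalWord COMMA universalWord RP
print(tree.toStringTree(recog=parser))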
token literal names:
null
'{unl}'
'{/unl}'
null
'.'
','
';'
':'
'-'
'('
')'
'{'
'}'
'<'
'>'
'@'
'_'
'and'
'aoj'
'ben'
'cnt'
'equ'
'icl'
'obj'
'qua'
null
null
null
token symbolic names:
null
null
null
WS
DOT
COMMA
SEMCOL
COLON
DASH
LP
RP
LC
RC
LESS
GREATER
AT
UNDERSCORE
AND
AOJ
BEN
CNT
EQU
ICL
OBJ
QUA
LETTER
WORD
VALUE
rule names:
unlPart
relationOccurrence
universalWord
headword
restriction
attribute
value
universalRelation
sentence
ident
word
punctuation
bracket
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 29, 103, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 3, 2, 3, 2, 6, 2, 31, 10, 2, 13, 2, 14, 2, 32, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 7, 4, 49, 10, 4, 12, 4, 14, 4, 52, 11, 4, 3, 4, 3, 4, 5, 4, 56, 10, 4, 3, 4, 7, 4, 59, 10, 4, 12, 4, 14, 4, 62, 11, 4, 3, 4, 5, 4, 65, 10, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 7, 10, 84, 10, 10, 12, 10, 14, 10, 87, 11, 10, 3, 11, 3, 11, 3, 11, 7, 11, 92, 10, 11, 12, 11, 14, 11, 95, 11, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 2, 2, 15, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 2, 6, 3, 2, 19, 26, 3, 2, 27, 28, 3, 2, 6, 10, 3, 2, 11, 14, 2, 98, 2, 28, 3, 2, 2, 2, 4, 36, 3, 2, 2, 2, 6, 64, 3, 2, 2, 2, 8, 66, 3, 2, 2, 2, 10, 68, 3, 2, 2, 2, 12, 72, 3, 2, 2, 2, 14, 76, 3, 2, 2, 2, 16, 78, 3, 2, 2, 2, 18, 85, 3, 2, 2, 2, 20, 88, 3, 2, 2, 2, 22, 96, 3, 2, 2, 2, 24, 98, 3, 2, 2, 2, 26, 100, 3, 2, 2, 2, 28, 30, 7, 3, 2, 2, 29, 31, 5, 4, 3, 2, 30, 29, 3, 2, 2, 2, 31, 32, 3, 2, 2, 2, 32, 30, 3, 2, 2, 2, 32, 33, 3, 2, 2, 2, 33, 34, 3, 2, 2, 2, 34, 35, 7, 4, 2, 2, 35, 3, 3, 2, 2, 2, 36, 37, 5, 16, 9, 2, 37, 38, 7, 11, 2, 2, 38, 39, 5, 6, 4, 2, 39, 40, 7, 7, 2, 2, 40, 41, 5, 6, 4, 2, 41, 42, 7, 12, 2, 2, 42, 5, 3, 2, 2, 2, 43, 55, 5, 8, 5, 2, 44, 45, 7, 11, 2, 2, 45, 50, 5, 10, 6, 2, 46, 47, 7, 7, 2, 2, 47, 49, 5, 10, 6, 2, 48, 46, 3, 2, 2, 2, 49, 52, 3, 2, 2, 2, 50, 48, 3, 2, 2, 2, 50, 51, 3, 2, 2, 2, 51, 53, 3, 2, 2, 2, 52, 50, 3, 2, 2, 2, 53, 54, 7, 12, 2, 2, 54, 56, 3, 2, 2, 2, 55, 44, 3, 2, 2, 2, 55, 56, 3, 2, 2, 2, 56, 60, 3, 2, 2, 2, 57, 59, 5, 12, 7, 2, 58, 57, 3, 2, 2, 2, 59, 62, 3, 2, 2, 2, 60, 58, 3, 2, 2, 2, 60, 61, 3, 2, 2, 2, 61, 65, 3, 2, 2, 2, 62, 60, 3, 2, 2, 2, 63, 65, 5, 14, 8, 2, 64, 43, 3, 2, 2, 2, 64, 63, 3, 2, 2, 2, 65, 7, 3, 2, 2, 2, 66, 67, 5, 20, 11, 2, 67, 9, 3, 2, 2, 2, 68, 69, 5, 16, 9, 2, 69, 70, 7, 16, 2, 2, 70, 71, 5, 20, 11, 2, 71, 11, 3, 2, 2, 2, 72, 73, 7, 6, 2, 2, 73, 74, 7, 17, 2, 2, 74, 75, 5, 20, 11, 2, 75, 13, 3, 2, 2, 2, 76, 77, 7, 29, 2, 2, 77, 15, 3, 2, 2, 2, 78, 79, 9, 2, 2, 2, 79, 17, 3, 2, 2, 2, 80, 84, 5, 22, 12, 2, 81, 84, 5, 24, 13, 2, 82, 84, 5, 26, 14, 2, 83, 80, 3, 2, 2, 2, 83, 81, 3, 2, 2, 2, 83, 82, 3, 2, 2, 2, 84, 87, 3, 2, 2, 2, 85, 83, 3, 2, 2, 2, 85, 86, 3, 2, 2, 2, 86, 19, 3, 2, 2, 2, 87, 85, 3, 2, 2, 2, 88, 93, 5, 22, 12, 2, 89, 90, 7, 18, 2, 2, 90, 92, 5, 22, 12, 2, 91, 89, 3, 2, 2, 2, 92, 95, 3, 2, 2, 2, 93, 91, 3, 2, 2, 2, 93, 94, 3, 2, 2, 2, 94, 21, 3, 2, 2, 2, 95, 93, 3, 2, 2, 2, 96, 97, 9, 3, 2, 2, 97, 23, 3, 2, 2, 2, 98, 99, 9, 4, 2, 2, 99, 25, 3, 2, 2, 2, 100, 101, 9, 5, 2, 2, 101, 27, 3, 2, 2, 2, 10, 32, 50, 55, 60, 64, 83, 85, 93]
\ No newline at end of file
T__0=1
T__1=2
WS=3
DOT=4
COMMA=5
SEMCOL=6
COLON=7
DASH=8
LP=9
RP=10
LC=11
RC=12
LESS=13
GREATER=14
AT=15
UNDERSCORE=16
AND=17
AOJ=18
BEN=19
CNT=20
EQU=21
ICL=22
OBJ=23
QUA=24
LETTER=25
WORD=26
VALUE=27
'{unl}'=1
'{/unl}'=2
'.'=4
','=5
';'=6
':'=7
'-'=8
'('=9
')'=10
'{'=11
'}'=12
'<'=13
'>'=14
'@'=15
'_'=16
'and'=17
'aoj'=18
'ben'=19
'cnt'=20
'equ'=21
'icl'=22
'obj'=23
'qua'=24
token literal names:
null
'{unl}'
'{/unl}'
null
'.'
','
';'
':'
'-'
'('
')'
'{'
'}'
'<'
'>'
'@'
'_'
'and'
'aoj'
'ben'
'cnt'
'equ'
'icl'
'obj'
'qua'
null
null
null
token symbolic names:
null
null
null
WS
DOT
COMMA
SEMCOL
COLON
DASH
LP
RP
LC
RC
LESS
GREATER
AT
UNDERSCORE
AND
AOJ
BEN
CNT
EQU
ICL
OBJ
QUA
LETTER
WORD
VALUE
rule names:
T__0
T__1
WS
LOWERCASE
UPPERCASE
DIGIT
ASCII
DOT
COMMA
SEMCOL
COLON
DASH
LP
RP
LC
RC
LESS
GREATER
AT
UNDERSCORE
AND
AOJ
BEN
CNT
EQU
ICL
OBJ
QUA
LETTER
WORD
VALUE
channel names:
DEFAULT_TOKEN_CHANNEL
HIDDEN
mode names:
DEFAULT_MODE
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 29, 173, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 6, 4, 80, 10, 4, 13, 4, 14, 4, 81, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, 15, 3, 16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 3, 23, 3, 24, 3, 24, 3, 24, 3, 24, 3, 25, 3, 25, 3, 25, 3, 25, 3, 26, 3, 26, 3, 26, 3, 26, 3, 27, 3, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 28, 3, 28, 3, 29, 3, 29, 3, 29, 3, 29, 3, 30, 3, 30, 5, 30, 154, 10, 30, 3, 31, 6, 31, 157, 10, 31, 13, 31, 14, 31, 158, 3, 32, 6, 32, 162, 10, 32, 13, 32, 14, 32, 163, 3, 32, 3, 32, 6, 32, 168, 10, 32, 13, 32, 14, 32, 169, 5, 32, 172, 10, 32, 2, 2, 33, 3, 3, 5, 4, 7, 5, 9, 2, 11, 2, 13, 2, 15, 2, 17, 6, 19, 7, 21, 8, 23, 9, 25, 10, 27, 11, 29, 12, 31, 13, 33, 14, 35, 15, 37, 16, 39, 17, 41, 18, 43, 19, 45, 20, 47, 21, 49, 22, 51, 23, 53, 24, 55, 25, 57, 26, 59, 27, 61, 28, 63, 29, 3, 2, 6, 5, 2, 11, 12, 14, 15, 34, 34, 3, 2, 99, 124, 3, 2, 67, 92, 7, 2, 12, 12, 36, 36, 42, 43, 62, 62, 64, 64, 2, 174, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, 63, 3, 2, 2, 2, 3, 65, 3, 2, 2, 2, 5, 71, 3, 2, 2, 2, 7, 79, 3, 2, 2, 2, 9, 85, 3, 2, 2, 2, 11, 87, 3, 2, 2, 2, 13, 89, 3, 2, 2, 2, 15, 91, 3, 2, 2, 2, 17, 93, 3, 2, 2, 2, 19, 95, 3, 2, 2, 2, 21, 97, 3, 2, 2, 2, 23, 99, 3, 2, 2, 2, 25, 101, 3, 2, 2, 2, 27, 103, 3, 2, 2, 2, 29, 105, 3, 2, 2, 2, 31, 107, 3, 2, 2, 2, 33, 109, 3, 2, 2, 2, 35, 111, 3, 2, 2, 2, 37, 113, 3, 2, 2, 2, 39, 115, 3, 2, 2, 2, 41, 117, 3, 2, 2, 2, 43, 119, 3, 2, 2, 2, 45, 123, 3, 2, 2, 2, 47, 127, 3, 2, 2, 2, 49, 131, 3, 2, 2, 2, 51, 135, 3, 2, 2, 2, 53, 139, 3, 2, 2, 2, 55, 143, 3, 2, 2, 2, 57, 147, 3, 2, 2, 2, 59, 153, 3, 2, 2, 2, 61, 156, 3, 2, 2, 2, 63, 161, 3, 2, 2, 2, 65, 66, 7, 125, 2, 2, 66, 67, 7, 119, 2, 2, 67, 68, 7, 112, 2, 2, 68, 69, 7, 110, 2, 2, 69, 70, 7, 127, 2, 2, 70, 4, 3, 2, 2, 2, 71, 72, 7, 125, 2, 2, 72, 73, 7, 49, 2, 2, 73, 74, 7, 119, 2, 2, 74, 75, 7, 112, 2, 2, 75, 76, 7, 110, 2, 2, 76, 77, 7, 127, 2, 2, 77, 6, 3, 2, 2, 2, 78, 80, 9, 2, 2, 2, 79, 78, 3, 2, 2, 2, 80, 81, 3, 2, 2, 2, 81, 79, 3, 2, 2, 2, 81, 82, 3, 2, 2, 2, 82, 83, 3, 2, 2, 2, 83, 84, 8, 4, 2, 2, 84, 8, 3, 2, 2, 2, 85, 86, 9, 3, 2, 2, 86, 10, 3, 2, 2, 2, 87, 88, 9, 4, 2, 2, 88, 12, 3, 2, 2, 2, 89, 90, 4, 50, 59, 2, 90, 14, 3, 2, 2, 2, 91, 92, 10, 5, 2, 2, 92, 16, 3, 2, 2, 2, 93, 94, 7, 48, 2, 2, 94, 18, 3, 2, 2, 2, 95, 96, 7, 46, 2, 2, 96, 20, 3, 2, 2, 2, 97, 98, 7, 61, 2, 2, 98, 22, 3, 2, 2, 2, 99, 100, 7, 60, 2, 2, 100, 24, 3, 
2, 2, 2, 101, 102, 7, 47, 2, 2, 102, 26, 3, 2, 2, 2, 103, 104, 7, 42, 2, 2, 104, 28, 3, 2, 2, 2, 105, 106, 7, 43, 2, 2, 106, 30, 3, 2, 2, 2, 107, 108, 7, 125, 2, 2, 108, 32, 3, 2, 2, 2, 109, 110, 7, 127, 2, 2, 110, 34, 3, 2, 2, 2, 111, 112, 7, 62, 2, 2, 112, 36, 3, 2, 2, 2, 113, 114, 7, 64, 2, 2, 114, 38, 3, 2, 2, 2, 115, 116, 7, 66, 2, 2, 116, 40, 3, 2, 2, 2, 117, 118, 7, 97, 2, 2, 118, 42, 3, 2, 2, 2, 119, 120, 7, 99, 2, 2, 120, 121, 7, 112, 2, 2, 121, 122, 7, 102, 2, 2, 122, 44, 3, 2, 2, 2, 123, 124, 7, 99, 2, 2, 124, 125, 7, 113, 2, 2, 125, 126, 7, 108, 2, 2, 126, 46, 3, 2, 2, 2, 127, 128, 7, 100, 2, 2, 128, 129, 7, 103, 2, 2, 129, 130, 7, 112, 2, 2, 130, 48, 3, 2, 2, 2, 131, 132, 7, 101, 2, 2, 132, 133, 7, 112, 2, 2, 133, 134, 7, 118, 2, 2, 134, 50, 3, 2, 2, 2, 135, 136, 7, 103, 2, 2, 136, 137, 7, 115, 2, 2, 137, 138, 7, 119, 2, 2, 138, 52, 3, 2, 2, 2, 139, 140, 7, 107, 2, 2, 140, 141, 7, 101, 2, 2, 141, 142, 7, 110, 2, 2, 142, 54, 3, 2, 2, 2, 143, 144, 7, 113, 2, 2, 144, 145, 7, 100, 2, 2, 145, 146, 7, 108, 2, 2, 146, 56, 3, 2, 2, 2, 147, 148, 7, 115, 2, 2, 148, 149, 7, 119, 2, 2, 149, 150, 7, 99, 2, 2, 150, 58, 3, 2, 2, 2, 151, 154, 5, 9, 5, 2, 152, 154, 5, 11, 6, 2, 153, 151, 3, 2, 2, 2, 153, 152, 3, 2, 2, 2, 154, 60, 3, 2, 2, 2, 155, 157, 5, 59, 30, 2, 156, 155, 3, 2, 2, 2, 157, 158, 3, 2, 2, 2, 158, 156, 3, 2, 2, 2, 158, 159, 3, 2, 2, 2, 159, 62, 3, 2, 2, 2, 160, 162, 5, 13, 7, 2, 161, 160, 3, 2, 2, 2, 162, 163, 3, 2, 2, 2, 163, 161, 3, 2, 2, 2, 163, 164, 3, 2, 2, 2, 164, 171, 3, 2, 2, 2, 165, 167, 5, 17, 9, 2, 166, 168, 5, 13, 7, 2, 167, 166, 3, 2, 2, 2, 168, 169, 3, 2, 2, 2, 169, 167, 3, 2, 2, 2, 169, 170, 3, 2, 2, 2, 170, 172, 3, 2, 2, 2, 171, 165, 3, 2, 2, 2, 171, 172, 3, 2, 2, 2, 172, 64, 3, 2, 2, 2, 9, 2, 81, 153, 158, 163, 169, 171, 3, 8, 2, 2]
\ No newline at end of file
# Generated from grammar/unl/unl.g4 by ANTLR 4.9.3
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\35")
buf.write("\u00ad\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \3\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\3\3\3\3\4\6\4P\n\4\r\4\16\4Q\3\4\3\4\3")
buf.write("\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3")
buf.write("\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20\3\21")
buf.write("\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26\3\26")
buf.write("\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31")
buf.write("\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33")
buf.write("\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36\3\36\5\36")
buf.write("\u009a\n\36\3\37\6\37\u009d\n\37\r\37\16\37\u009e\3 \6")
buf.write(" \u00a2\n \r \16 \u00a3\3 \3 \6 \u00a8\n \r \16 \u00a9")
buf.write("\5 \u00ac\n \2\2!\3\3\5\4\7\5\t\2\13\2\r\2\17\2\21\6\23")
buf.write("\7\25\b\27\t\31\n\33\13\35\f\37\r!\16#\17%\20\'\21)\22")
buf.write("+\23-\24/\25\61\26\63\27\65\30\67\319\32;\33=\34?\35\3")
buf.write("\2\6\5\2\13\f\16\17\"\"\3\2c|\3\2C\\\7\2\f\f$$*+>>@@\2")
buf.write("\u00ae\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\21\3\2\2")
buf.write("\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2")
buf.write("\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#")
buf.write("\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2")
buf.write("\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65")
buf.write("\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2")
buf.write("\2?\3\2\2\2\3A\3\2\2\2\5G\3\2\2\2\7O\3\2\2\2\tU\3\2\2")
buf.write("\2\13W\3\2\2\2\rY\3\2\2\2\17[\3\2\2\2\21]\3\2\2\2\23_")
buf.write("\3\2\2\2\25a\3\2\2\2\27c\3\2\2\2\31e\3\2\2\2\33g\3\2\2")
buf.write("\2\35i\3\2\2\2\37k\3\2\2\2!m\3\2\2\2#o\3\2\2\2%q\3\2\2")
buf.write("\2\'s\3\2\2\2)u\3\2\2\2+w\3\2\2\2-{\3\2\2\2/\177\3\2\2")
buf.write("\2\61\u0083\3\2\2\2\63\u0087\3\2\2\2\65\u008b\3\2\2\2")
buf.write("\67\u008f\3\2\2\29\u0093\3\2\2\2;\u0099\3\2\2\2=\u009c")
buf.write("\3\2\2\2?\u00a1\3\2\2\2AB\7}\2\2BC\7w\2\2CD\7p\2\2DE\7")
buf.write("n\2\2EF\7\177\2\2F\4\3\2\2\2GH\7}\2\2HI\7\61\2\2IJ\7w")
buf.write("\2\2JK\7p\2\2KL\7n\2\2LM\7\177\2\2M\6\3\2\2\2NP\t\2\2")
buf.write("\2ON\3\2\2\2PQ\3\2\2\2QO\3\2\2\2QR\3\2\2\2RS\3\2\2\2S")
buf.write("T\b\4\2\2T\b\3\2\2\2UV\t\3\2\2V\n\3\2\2\2WX\t\4\2\2X\f")
buf.write("\3\2\2\2YZ\4\62;\2Z\16\3\2\2\2[\\\n\5\2\2\\\20\3\2\2\2")
buf.write("]^\7\60\2\2^\22\3\2\2\2_`\7.\2\2`\24\3\2\2\2ab\7=\2\2")
buf.write("b\26\3\2\2\2cd\7<\2\2d\30\3\2\2\2ef\7/\2\2f\32\3\2\2\2")
buf.write("gh\7*\2\2h\34\3\2\2\2ij\7+\2\2j\36\3\2\2\2kl\7}\2\2l ")
buf.write("\3\2\2\2mn\7\177\2\2n\"\3\2\2\2op\7>\2\2p$\3\2\2\2qr\7")
buf.write("@\2\2r&\3\2\2\2st\7B\2\2t(\3\2\2\2uv\7a\2\2v*\3\2\2\2")
buf.write("wx\7c\2\2xy\7p\2\2yz\7f\2\2z,\3\2\2\2{|\7c\2\2|}\7q\2")
buf.write("\2}~\7l\2\2~.\3\2\2\2\177\u0080\7d\2\2\u0080\u0081\7g")
buf.write("\2\2\u0081\u0082\7p\2\2\u0082\60\3\2\2\2\u0083\u0084\7")
buf.write("e\2\2\u0084\u0085\7p\2\2\u0085\u0086\7v\2\2\u0086\62\3")
buf.write("\2\2\2\u0087\u0088\7g\2\2\u0088\u0089\7s\2\2\u0089\u008a")
buf.write("\7w\2\2\u008a\64\3\2\2\2\u008b\u008c\7k\2\2\u008c\u008d")
buf.write("\7e\2\2\u008d\u008e\7n\2\2\u008e\66\3\2\2\2\u008f\u0090")
buf.write("\7q\2\2\u0090\u0091\7d\2\2\u0091\u0092\7l\2\2\u00928\3")
buf.write("\2\2\2\u0093\u0094\7s\2\2\u0094\u0095\7w\2\2\u0095\u0096")
buf.write("\7c\2\2\u0096:\3\2\2\2\u0097\u009a\5\t\5\2\u0098\u009a")
buf.write("\5\13\6\2\u0099\u0097\3\2\2\2\u0099\u0098\3\2\2\2\u009a")
buf.write("<\3\2\2\2\u009b\u009d\5;\36\2\u009c\u009b\3\2\2\2\u009d")
buf.write("\u009e\3\2\2\2\u009e\u009c\3\2\2\2\u009e\u009f\3\2\2\2")
buf.write("\u009f>\3\2\2\2\u00a0\u00a2\5\r\7\2\u00a1\u00a0\3\2\2")
buf.write("\2\u00a2\u00a3\3\2\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a4")
buf.write("\3\2\2\2\u00a4\u00ab\3\2\2\2\u00a5\u00a7\5\21\t\2\u00a6")
buf.write("\u00a8\5\r\7\2\u00a7\u00a6\3\2\2\2\u00a8\u00a9\3\2\2\2")
buf.write("\u00a9\u00a7\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00ac\3")
buf.write("\2\2\2\u00ab\u00a5\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac@")
buf.write("\3\2\2\2\t\2Q\u0099\u009e\u00a3\u00a9\u00ab\3\b\2\2")
return buf.getvalue()
class unlLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
WS = 3
DOT = 4
COMMA = 5
SEMCOL = 6
COLON = 7
DASH = 8
LP = 9
RP = 10
LC = 11
RC = 12
LESS = 13
GREATER = 14
AT = 15
UNDERSCORE = 16
AND = 17
AOJ = 18
BEN = 19
CNT = 20
EQU = 21
ICL = 22
OBJ = 23
QUA = 24
LETTER = 25
WORD = 26
VALUE = 27
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'{unl}'", "'{/unl}'", "'.'", "','", "';'", "':'", "'-'", "'('",
"')'", "'{'", "'}'", "'<'", "'>'", "'@'", "'_'", "'and'", "'aoj'",
"'ben'", "'cnt'", "'equ'", "'icl'", "'obj'", "'qua'" ]
symbolicNames = [ "<INVALID>",
"WS", "DOT", "COMMA", "SEMCOL", "COLON", "DASH", "LP", "RP",
"LC", "RC", "LESS", "GREATER", "AT", "UNDERSCORE", "AND", "AOJ",
"BEN", "CNT", "EQU", "ICL", "OBJ", "QUA", "LETTER", "WORD",
"VALUE" ]
ruleNames = [ "T__0", "T__1", "WS", "LOWERCASE", "UPPERCASE", "DIGIT",
"ASCII", "DOT", "COMMA", "SEMCOL", "COLON", "DASH", "LP",
"RP", "LC", "RC", "LESS", "GREATER", "AT", "UNDERSCORE",
"AND", "AOJ", "BEN", "CNT", "EQU", "ICL", "OBJ", "QUA",
"LETTER", "WORD", "VALUE" ]
grammarFileName = "unl.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9.3")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
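A small hedged helper, not part of the commit: assuming the antlr4 Python runtime, it dumps the token stream produced by the unlLexer above; the numeric token types correspond to the unl.tokens table that follows.
# Hypothetical token dump for a fragment of UNL input (illustration only).
from antlr4 import InputStream
from unlLexer import unlLexer

lexer = unlLexer(InputStream("and( listening(icl>sensing), traffic(icl>communication) )"))
for tok in lexer.getAllTokens():      # WS is skipped by the grammar, so it never appears here
    print(tok.type, repr(tok.text))   # e.g. 17 'and', 9 '(', 26 'listening', ...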
T__0=1
T__1=2
WS=3
DOT=4
COMMA=5
SEMCOL=6
COLON=7
DASH=8
LP=9
RP=10
LC=11
RC=12
LESS=13
GREATER=14
AT=15
UNDERSCORE=16
AND=17
AOJ=18
BEN=19
CNT=20
EQU=21
ICL=22
OBJ=23
QUA=24
LETTER=25
WORD=26
VALUE=27
'{unl}'=1
'{/unl}'=2
'.'=4
','=5
';'=6
':'=7
'-'=8
'('=9
')'=10
'{'=11
'}'=12
'<'=13
'>'=14
'@'=15
'_'=16
'and'=17
'aoj'=18
'ben'=19
'cnt'=20
'equ'=21
'icl'=22
'obj'=23
'qua'=24
# Generated from grammar/unl/unl.g4 by ANTLR 4.9.3
from antlr4 import *
if __name__ is not None and "." in __name__:
from .unlParser import unlParser
else:
from unlParser import unlParser
# This class defines a complete listener for a parse tree produced by unlParser.
class unlListener(ParseTreeListener):
# Enter a parse tree produced by unlParser#unlPart.
def enterUnlPart(self, ctx:unlParser.UnlPartContext):
pass
# Exit a parse tree produced by unlParser#unlPart.
def exitUnlPart(self, ctx:unlParser.UnlPartContext):
pass
# Enter a parse tree produced by unlParser#relationOccurrence.
def enterRelationOccurrence(self, ctx:unlParser.RelationOccurrenceContext):
pass
# Exit a parse tree produced by unlParser#relationOccurrence.
def exitRelationOccurrence(self, ctx:unlParser.RelationOccurrenceContext):
pass
# Enter a parse tree produced by unlParser#universalWord.
def enterUniversalWord(self, ctx:unlParser.UniversalWordContext):
pass
# Exit a parse tree produced by unlParser#universalWord.
def exitUniversalWord(self, ctx:unlParser.UniversalWordContext):
pass
# Enter a parse tree produced by unlParser#headword.
def enterHeadword(self, ctx:unlParser.HeadwordContext):
pass
# Exit a parse tree produced by unlParser#headword.
def exitHeadword(self, ctx:unlParser.HeadwordContext):
pass
# Enter a parse tree produced by unlParser#restriction.
def enterRestriction(self, ctx:unlParser.RestrictionContext):
pass
# Exit a parse tree produced by unlParser#restriction.
def exitRestriction(self, ctx:unlParser.RestrictionContext):
pass
# Enter a parse tree produced by unlParser#attribute.
def enterAttribute(self, ctx:unlParser.AttributeContext):
pass
# Exit a parse tree produced by unlParser#attribute.
def exitAttribute(self, ctx:unlParser.AttributeContext):
pass
# Enter a parse tree produced by unlParser#value.
def enterValue(self, ctx:unlParser.ValueContext):
pass
# Exit a parse tree produced by unlParser#value.
def exitValue(self, ctx:unlParser.ValueContext):
pass
# Enter a parse tree produced by unlParser#universalRelation.
def enterUniversalRelation(self, ctx:unlParser.UniversalRelationContext):
pass
# Exit a parse tree produced by unlParser#universalRelation.
def exitUniversalRelation(self, ctx:unlParser.UniversalRelationContext):
pass
# Enter a parse tree produced by unlParser#sentence.
def enterSentence(self, ctx:unlParser.SentenceContext):
pass
# Exit a parse tree produced by unlParser#sentence.
def exitSentence(self, ctx:unlParser.SentenceContext):
pass
# Enter a parse tree produced by unlParser#ident.
def enterIdent(self, ctx:unlParser.IdentContext):
pass
# Exit a parse tree produced by unlParser#ident.
def exitIdent(self, ctx:unlParser.IdentContext):
pass
# Enter a parse tree produced by unlParser#word.
def enterWord(self, ctx:unlParser.WordContext):
pass
# Exit a parse tree produced by unlParser#word.
def exitWord(self, ctx:unlParser.WordContext):
pass
# Enter a parse tree produced by unlParser#punctuation.
def enterPunctuation(self, ctx:unlParser.PunctuationContext):
pass
# Exit a parse tree produced by unlParser#punctuation.
def exitPunctuation(self, ctx:unlParser.PunctuationContext):
pass
# Enter a parse tree produced by unlParser#bracket.
def enterBracket(self, ctx:unlParser.BracketContext):
pass
# Exit a parse tree produced by unlParser#bracket.
def exitBracket(self, ctx:unlParser.BracketContext):
pass
del unlParser
\ No newline at end of file
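An end-to-end sketch, not part of the commit: assuming the generated unlLexer and unlParser modules and the antlr4 Python runtime, the listener above can be subclassed and walked over a parsed {unl} block to collect its relation occurrences.
# Hypothetical listener usage (illustration only).
from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
from unlLexer import unlLexer
from unlParser import unlParser
from unlListener import unlListener

class RelationCollector(unlListener):
    """Collects the source text of every relationOccurrence node."""
    def __init__(self):
        self.relations = []
    def enterRelationOccurrence(self, ctx:unlParser.RelationOccurrenceContext):
        self.relations.append(ctx.getText())

unl_text = "{unl} qua( state(icl>attribute).@plu, 2 ) {/unl}"
parser = unlParser(CommonTokenStream(unlLexer(InputStream(unl_text))))
tree = parser.unlPart()
collector = RelationCollector()
ParseTreeWalker().walk(collector, tree)
print(collector.relations)   # ['qua(state(icl>attribute).@plu,2)']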
[D]
[S:R1]
{org:en}
The system allows a radio channel to take on two states: Listening and Traffic.
{/org}
{unl}
aoj( allow(icl>be, aoj>thing, ben>thing, obj>uw, equ>make_possible).@entry, system(icl>group).@def )
obj( allow(icl>be, aoj>thing, ben>thing, obj>uw, equ>make_possible).@entry, take_on(aoj>thing, equ>assume,icl>change, obj>thing) )
ben( allow(icl>be, aoj>thing, ben>thing, obj>uw, equ>make_possible).@entry, channel(icl>radiowave).@indef)
aoj( take_on(aoj>thing, equ>assume, icl>change, obj>thing), channel(icl>radiowave).@indef )
obj( take_on(aoj>thing, equ>assume, icl>change, obj>thing), state(icl>attribute).@plu )
qua( state(icl>attribute).@plu, 2 )
cnt( state(icl>attribute).@plu, listening(icl>sensing) )
and( listening(icl>sensing),traffic(icl>communication) )
{/unl}
[/S]
[/D]
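A hedged driver sketch, not part of the commit: the function below (hypothetical name parse_sentence_annotations) cuts the {org:en}...{/org} and {unl}...{/unl} blocks out of a sentence element like [S:R1] above and feeds each block to its parser, assuming the generated modules and the antlr4 runtime.
# Hypothetical driver for annotated sentences (illustration only).
from antlr4 import InputStream, CommonTokenStream
from orgLexer import orgLexer
from orgParser import orgParser
from unlLexer import unlLexer
from unlParser import unlParser

def parse_sentence_annotations(sentence_text):
    # Slice out the two annotation blocks by their delimiters.
    org_block = sentence_text[sentence_text.index("{org:en}"):sentence_text.index("{/org}") + len("{/org}")]
    unl_block = sentence_text[sentence_text.index("{unl}"):sentence_text.index("{/unl}") + len("{/unl}")]
    org_tree = orgParser(CommonTokenStream(orgLexer(InputStream(org_block)))).orgPart()
    unl_tree = unlParser(CommonTokenStream(unlLexer(InputStream(unl_block)))).unlPart()
    return org_tree, unl_tree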