
org.antlr.v4.tool.templates.codegen.Python3.Python3.stg

/*
 * [The "BSD license"]
 *  Copyright (c) 2012-2016 Terence Parr
 *  Copyright (c) 2012-2016 Sam Harwell
 *  Copyright (c) 2014 Eric Vergnaud
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *  1. Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 *  IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 *  OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 *  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 *  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 *  THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/** The ANTLR tool checks that output templates are compatible with the tool's code generation.
 *  For now, a simple string match is used on the x.y part of the x.y.z version scheme.
 *  Must match Tool.VERSION when the templates are loaded.
 *
 *  REQUIRED.
 */
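
/* Illustrative sketch only (not part of the template set): the "x.y of x.y.z"
 * compatibility check described above can be expressed in Python roughly as
 * follows; the function and variable names here are hypothetical.
 *
 *     def versions_compatible(tool_version: str, template_version: str) -> bool:
 *         # Compare only the major.minor prefix, e.g. the "4.9" of "4.9.3".
 *         major_minor = lambda v: ".".join(v.split(".")[:2])
 *         return major_minor(tool_version) == major_minor(template_version)
 *
 *     versions_compatible("4.9.3", "4.9.1")    # True: same 4.9 line
 *     versions_compatible("4.9.3", "4.10.1")   # False: minor version differs
 */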

pythonTypeInitMap ::= [
	"bool":"False",
	"int":"0",
	"float":"0.0",
	"str":"",
	default:"None" // anything other than a primitive type is an object
]
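
/* Illustrative sketch only: pythonTypeInitMap supplies the default initializer
 * emitted for typed rule attributes (arguments, locals, return values). For a
 * hypothetical rule "stat returns [int count, bool ok, SomeNode node]", the
 * generated context initialization would look roughly like:
 *
 *     self.count = 0      # "int"  -> "0"
 *     self.ok = False     # "bool" -> "False"
 *     self.node = None    # non-primitive types fall through to the default, "None"
 */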

// args must be , 

ParserFile(file, parser, namedActions, contextSuperClass) ::= <<

# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys




>>
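
/* Illustrative sketch only: for a hypothetical grammar file Expr.g4, the preamble
 * produced by ParserFile (via fileHeader and the imports above) looks roughly like:
 *
 *     # Generated from Expr.g4 by ANTLR 4.x
 *     # encoding: utf-8
 *     from antlr4 import *
 *     from io import StringIO
 *     from typing.io import TextIO
 *     import sys
 */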

ListenerFile(file, header, namedActions) ::= <<

from antlr4 import *
if __name__ is not None and "." in __name__:
    from . import 
else:
    from  import 

# This class defines a complete listener for a parse tree produced by .
class Listener(ParseTreeListener):

    #.
    def enter(self, ctx:.Context):
        pass

    # Exit a parse tree produced by #.
    def exit(self, ctx:.Context):
        pass

}; separator="\n">

>>

VisitorFile(file, header, namedActions) ::= <<
from antlr4 import *
if __name__ is not None and "." in __name__:
    from . import 
else:
    from  import 

# This class defines a complete generic visitor for a parse tree produced by .
class Visitor(ParseTreeVisitor):

    #.
    def visit(self, ctx:.Context):
        return self.visitChildren(ctx)

}; separator="\n">

del 
>>

fileHeader(grammarFileName, ANTLRVersion) ::= <<
# Generated from  by ANTLR 
>>

Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<
>>

Parser_(parser, funcs, atn, sempredFuncs, ctor, superClass) ::= <<
if __name__ is not None and "." in __name__:
    from . import 
else:
    from  import 

class ( Parser ):

    grammarFileName = ""

    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    sharedContextCache = PredictionContextCache()

    literalNames = [ }; null="\"\\"", separator=", ", wrap, anchor> ]

    symbolicNames = [ }; null="\"\\"", separator=", ", wrap, anchor> ]

    = }; separator="\n", wrap, anchor>

    ruleNames = [ "}; separator=", ", wrap, anchor> ]

    EOF = .EOF

    =}; separator="\n", wrap, anchor>

    def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
        if self._predicates == None:
            self._predicates = dict()
        ] = self._sempred}; separator="\n ">
        pred = self._predicates.get(ruleIndex, None)
        if pred is None:
            raise Exception("No predicate with index:" + str(ruleIndex))
        else:
            return pred(localctx, predIndex)

>>

dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
def action(self, localctx:RuleContext, ruleIndex:int, actionIndex:int):
    if self._actions is None:
        actions = dict()
        ] = self._action }; separator="\n">
        self._actions = actions
    action = self._actions.get(ruleIndex, None)
    if action is not None:
        action(localctx, actionIndex)
    else:
        raise Exception("No registered action for:" + str(ruleIndex))

def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    if self._predicates is None:
        preds = dict()
        ] = self._sempred}; separator="\n">
        self._predicates = preds
    pred = self._predicates.get(ruleIndex, None)
    if pred is not None:
        return pred(localctx, predIndex)
    else:
        raise Exception("No registered predicate for:" + str(ruleIndex))
>>

parser_ctor(p) ::= <<
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
    super().__init__(input, output)
    self.checkVersion("")
    self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
    self._predicates = None
>>

/* This generates a private method since the actionIndex is generated, making an
 * overriding implementation impossible to maintain.
 */
RuleActionFunction(r, actions) ::= <<
def _action(self, localctx: , actionIndex:int):
    if actionIndex == :
    elif actionIndex == :
    }; separator="\n">
>>

/* This generates a private method since the predIndex is generated, making an
 * overriding implementation impossible to maintain.
 */
RuleSempredFunction(r, actions) ::= <<
def _sempred(self, localctx:, predIndex:int):
    if predIndex == :
        return
    elif predIndex == :
        return
    }; separator="\n">
>>

RuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,namedActions,finallyAction,postamble,exceptions) ::= <<
}; separator="\n">

def (self:}>):
    localctx = .(self, self._ctx, self.state}>)
    self.enterRule(localctx, , self.RULE_)
    try:
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
>>

LeftRecursiveRuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs, namedActions,finallyAction,postamble) ::= <<
}; separator="\n">

def (self, _p:int=0, }>):
    _parentctx = self._ctx
    _parentState = self.state
    localctx = .(self, self._ctx, _parentState}>)
    _prevctx = localctx
    _startState = self.enterRecursionRule(localctx, , self.RULE_, _p)
    try:
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
>>

CodeBlockForOuterMostAlt(currentOuterMostAltCodeBlock, locals, preamble, ops) ::= <<
localctx = .Context(self, localctx)
self.enterOuterAlt(localctx, )
>>

CodeBlockForAlt(currentAltCodeBlock, locals, preamble, ops) ::= <<
>>

LL1AltBlock(choice, preamble, alts, error) ::= <<
self.state = self._errHandler.sync(self)
= _input.LT(1)
token = self._input.LA(1)
pass}; separator="\nel">
else:
>>

LL1OptionalBlock(choice, alts, error) ::= <<
self.state = self._errHandler.sync(self)
token = self._input.LA(1)
pass}; separator="\nel">
else:
    pass
>>

LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
self.state = self._errHandler.sync(self)
if : ) ) !>
>>

LL1StarBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
self.state = self._errHandler.sync(self)
while :
    self.state = self._errHandler.sync(self)
>>

LL1PlusBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
self.state = self._errHandler.sync(self)
while True:
    self.state = self._errHandler.sync(self)
    if not ():
        break
>>

// LL(*) stuff

AltBlock(choice, preamble, alts, error) ::= <<
self.state = self._errHandler.sync(self)
= _input.LT(1)
la_ = self._interp.adaptivePredict(self._input,,self._ctx)
: pass
}; separator="\nel">
>>

OptionalBlock(choice, alts, error) ::= <<
self.state = self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,,self._ctx)
+1:
}; separator="\nel">
>>

StarBlock(choice, alts, sync, iteration) ::= <<
self.state = self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,,self._ctx)
while _alt!= and _alt!=ATN.INVALID_ALT_NUMBER:
    if _alt==1+1:
    self.state = self._errHandler.sync(self)
    _alt = self._interp.adaptivePredict(self._input,,self._ctx)
>>

PlusBlock(choice, alts, error) ::= <<
self.state = self._errHandler.sync(self)
_alt = 1+1
while _alt!= and _alt!=ATN.INVALID_ALT_NUMBER:
    +1:
    }; separator="\nel">
    else:
    self.state = self._errHandler.sync(self)
    _alt = self._interp.adaptivePredict(self._input,,self._ctx)
>>

Sync(s) ::= "sync()"

ThrowNoViableAlt(t) ::= "raise NoViableAltException(self)"

TestSetInline(s) ::= <<
}; separator=" or ">
>>

// Java language spec 15.19 - shift operators mask operands rather than overflow to 0... need range test
testShiftInRange(shiftAmount) ::= <<
(() & ~0x3f) == 0
>>

// produces smaller bytecode only when bits.ttypes contains more than two items
bitsetBitfieldComparison(s, bits) ::= <%
(})> and ((1 \<\< ) & ()}; separator=" | ">)) != 0)
%>

isZero ::= [
	"0":true,
	default:false
]

offsetShiftVar(shiftAmount, offset) ::= <%
( - )
%>

offsetShiftType(shiftAmount, offset) ::= <%
(. - ).
%>

// produces more efficient bytecode when bits.ttypes contains at most two items
bitsetInlineComparison(s, bits) ::= <%
==.}; separator=" or ">
%>

cases(ttypes) ::= <<
if token in [.}; separator=", ">]:
>>

InvokeRule(r, argExprsChunks) ::= <<
self.state = = }>self.(,)
>>

MatchToken(m) ::= <<
self.state = = }>self.match(.)
>>

MatchSet(m, expr, capture) ::= ""

MatchNotSet(m, expr, capture) ::= ""

CommonSetStuff(m, expr, capture, invert) ::= <<
self.state = = }>self._input.LT(1)
if \<= 0 or if not():
    = }>
    self._errHandler.recoverInline(self)
else:
    self._errHandler.reportMatch(self)
    self.consume()
>>

Wildcard(w) ::= <<
self.state = = }>self.matchWildcard()
>>

// ACTION STUFF

Action(a, foo, chunks) ::= ""

ArgAction(a, chunks) ::= ""

SemPred(p, chunks, failChunks) ::= <<
self.state =
if not :
    from antlr4.error.Errors import FailedPredicateException
    raise FailedPredicateException(self, , , )
>>

ExceptionClause(e, catchArg, catchAction) ::= <<
catch () {
}
>>

// lexer actions are not associated with model objects

LexerSkipCommand() ::= "skip()"
LexerMoreCommand() ::= "more()"
LexerPopModeCommand() ::= "popMode()"
LexerTypeCommand(arg, grammar) ::= "_type = "
LexerChannelCommand(arg, grammar) ::= "_channel = "
LexerModeCommand(arg, grammar) ::= "_mode = "
LexerPushModeCommand(arg, grammar) ::= "pushMode()"

ActionText(t) ::= ""
ActionTemplate(t) ::= ""
ArgRef(a) ::= "localctx."
LocalRef(a) ::= "localctx."
RetValueRef(a) ::= "localctx."
QRetValueRef(a) ::= ".."

/** How to translate $tokenLabel */
TokenRef(t) ::= "."
LabelRef(t) ::= "."
ListLabelRef(t) ::= "."
SetAttr(s,rhsChunks) ::= ". = "

TokenLabelType() ::= ""
InputSymbolType() ::= ""

TokenPropertyRef_text(t) ::= "(None if . is None else ..text)"
TokenPropertyRef_type(t) ::= "(0 if . is None else ..type()"
TokenPropertyRef_line(t) ::= "(0 if . is None else ..line)"
TokenPropertyRef_pos(t) ::= "(0 if . is None else ..column)"
TokenPropertyRef_channel(t) ::= "(0 if (. is None else ..channel)"
TokenPropertyRef_index(t) ::= "(0 if . is None else ..tokenIndex)"
TokenPropertyRef_int(t) ::= "(0 if . is None else int(..text))"

RulePropertyRef_start(r) ::= "(None if . is None else ..start)"
RulePropertyRef_stop(r) ::= "(None if . is None else ..stop)"
RulePropertyRef_text(r) ::= "(None if . is None else self._input.getText((..start,..stop)))"
RulePropertyRef_ctx(r) ::= "."
RulePropertyRef_parser(r) ::= "self"

ThisRulePropertyRef_start(r) ::= "localctx.start"
ThisRulePropertyRef_stop(r) ::= "localctx.stop"
ThisRulePropertyRef_text(r) ::= "self._input.getText((localctx.start, self._input.LT(-1)))"
ThisRulePropertyRef_ctx(r) ::= "localctx"
ThisRulePropertyRef_parser(r) ::= "self"

NonLocalAttrRef(s) ::= "getInvokingContext()."
SetNonLocalAttr(s, rhsChunks) ::= "getInvokingContext(). = "
AddToLabelList(a) ::= "..append()"

TokenDecl(t) ::= "self. = None # "
TokenTypeDecl(t) ::= "self. = 0 # type"
TokenListDecl(t) ::= "self. = list() # of s"
RuleContextDecl(r) ::= "self. = None # "
RuleContextListDecl(rdecl) ::= "self. = list() # of s"

ContextTokenGetterDecl(t) ::= <<
def (self):
    return self.getToken(., 0)
>>

// should never be called
ContextTokenListGetterDecl(t) ::= <<
def _list(self):
    return self.getTokens(.)
>>

ContextTokenListIndexedGetterDecl(t) ::= <<
def (self, i:int=None):
    if i is None:
        return self.getTokens(.)
    else:
        return self.getToken(., i)
>>

ContextRuleGetterDecl(r) ::= <<
def (self):
    return self.getTypedRuleContext(.,0)
>>

// should never be called
ContextRuleListGetterDecl(r) ::= <<
def _list(self):
    return self.getTypedRuleContexts(.)
>>

ContextRuleListIndexedGetterDecl(r) ::= <<
def (self, i:int=None):
    if i is None:
        return self.getTypedRuleContexts(.)
    else:
        return self.getTypedRuleContext(.,i)
>>

LexerRuleContext() ::= "RuleContext"

/** The rule context name is the rule followed by a suffix; e.g.,
 *  r becomes rContext.
 */
RuleContextNameSuffix() ::= "Context"

ImplicitTokenLabel(tokenName) ::= "_"
ImplicitRuleLabel(ruleName) ::= "_"
ImplicitSetLabel(id) ::= "_tset"
ListLabelName(label) ::= "
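
/* Illustrative sketch only: for a hypothetical grammar "Expr" with a single rule
 * "expr", the ListenerFile template above expands to a generated module roughly
 * like the following:
 *
 *     from antlr4 import *
 *     if __name__ is not None and "." in __name__:
 *         from .ExprParser import ExprParser
 *     else:
 *         from ExprParser import ExprParser
 *
 *     # This class defines a complete listener for a parse tree produced by ExprParser.
 *     class ExprListener(ParseTreeListener):
 *
 *         # Enter a parse tree produced by ExprParser#expr.
 *         def enterExpr(self, ctx:ExprParser.ExprContext):
 *             pass
 *
 *         # Exit a parse tree produced by ExprParser#expr.
 *         def exitExpr(self, ctx:ExprParser.ExprContext):
 *             pass
 */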