org.antlr.codegen.templates.CSharp2.CSharp2.stg
/*
 * [The "BSD license"]
 * Copyright (c) 2007-2008 Johannes Luber
 * Copyright (c) 2005-2007 Kunle Odutola
 * Copyright (c) 2011 Sam Harwell
 * Copyright (c) 2011 Terence Parr
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

csharpVisibilityMap ::= [
	"private":"private",
	"protected":"protected",
	"public":"public",
	"fragment":"private",
	default:"private"
]
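
/*
 * Example (editorial sketch, rule name assumed): with the map above, a lexer rule
 * declared "fragment DIGIT : '0'..'9' ;" maps its visibility to "private", so the
 * emitted method is roughly:
 *
 *     private void mDIGIT()
 *     {
 *         // match '0'..'9'
 *     }
 *
 * Rules with no explicit visibility fall through to the default, which is also
 * "private".
 */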

/** The overall file structure of a recognizer; holds the methods for rules
 *  and cyclic DFAs plus supporting code. (A sketch of the emitted file shape
 *  follows this template.)
 */
outputFile(	LEXER,PARSER,TREE_PARSER, actionScope, actions,
			docComment, recognizer,
			name, tokens, tokenNames, rules, cyclicDFAs,
			bitsets, buildTemplate, buildAST, rewriteMode, profile,
			backtracking, synpreds, memoize, numRules,
			fileName, ANTLRVersion, generatedTimestamp, trace,
			scopes, superClass, literals) ::=
<<
//------------------------------------------------------------------------------
// \<auto-generated>
//     This code was generated by a tool.
//     ANTLR Version: <ANTLRVersion>
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// \</auto-generated>
//------------------------------------------------------------------------------

// $ANTLR <ANTLRVersion> <fileName> <generatedTimestamp>


#define ANTLR_TRACE

<@debugPreprocessor()>
// The variable 'variable' is assigned but its value is never used.
#pragma warning disable 168, 219
// Unreachable code detected.
#pragma warning disable 162
// Missing XML comment for publicly visible type or member 'Type_or_Member'
#pragma warning disable 1591



<@imports>
using System.Collections.Generic;
using Antlr.Runtime;
using Antlr.Runtime.Misc;

using Antlr.Runtime.Tree;
using RewriteRuleITokenStream = Antlr.Runtime.Tree.RewriteRuleTokenStream;

using ConditionalAttribute = System.Diagnostics.ConditionalAttribute;
<@end>

namespace 
{





} // namespace 

>>
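
/*
 * Illustrative sketch (editorial): with the <...> expressions expanded, a file
 * produced by outputFile() has roughly the following shape. The grammar name "T"
 * and the namespace are assumptions for illustration only.
 *
 *     //------------------------------------------------------------------------------
 *     // <auto-generated> ... </auto-generated>
 *     //------------------------------------------------------------------------------
 *     // $ANTLR 3.x T.g3 2011-01-01 12:00:00
 *
 *     #pragma warning disable 168, 219
 *     #pragma warning disable 162
 *     #pragma warning disable 1591
 *
 *     using System.Collections.Generic;
 *     using Antlr.Runtime;
 *
 *     namespace MyProject.Parsing
 *     {
 *         [System.CodeDom.Compiler.GeneratedCode("ANTLR", "3.x")]
 *         [System.CLSCompliant(false)]
 *         public partial class TParser : Antlr.Runtime.Parser
 *         {
 *             // token constants, rule methods, DFAs and follow sets go here
 *         }
 *     }
 */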

lexerInputStreamType() ::= <<

>>

lexer(grammar, name, tokens, scopes, rules, numRules, filterMode, labelType="CommonToken",
      superClass={Antlr.Runtime.Lexer}) ::= <<
[System.CodeDom.Compiler.GeneratedCode("ANTLR", "<ANTLRVersion>")]
[System.CLSCompliant(false)]
 partial class  : <@superClassName><@end>
{
	=;}; separator="\n">
	}>
	

    // delegates
     ;}; separator="\n">
    // delegators
     ;}; separator="\n">
     gParent;}>

	 ()
	{
		OnCreated();
	}

	 ( input }> )
		: this(input, new RecognizerSharedState()}>)
	{
	}

	 ( input, RecognizerSharedState state }>)
		: base(input, state)
	{


		state.ruleMemo = new System.Collections.Generic.Dictionary\[+1];<\n>


		 = new (input, this.state}>, this);}; separator="\n">
		 = ;}; separator="\n">
		;}>

		OnCreated();
	}
	public override string GrammarFileName { get { return "<fileName>"; } }

	private static readonly bool[] decisionCanBacktrack = new bool[0];


	public override  CharStream
	{
		get
		{
			return base.CharStream;
		}
		set
		{
			base.CharStream = value;
			 = new (input, state}>, this);}; separator="\n">
			 = ;}; separator="\n">
			;}>
		}
	}


	public override void SetState(RecognizerSharedState state)
	{
		base.SetState(state);
		.SetState(state);}; separator="\n">
	}




	


	[Conditional("ANTLR_TRACE")]
	protected virtual void OnCreated() {}
	[Conditional("ANTLR_TRACE")]
	protected virtual void EnterRule(string ruleName, int ruleIndex) {}
	[Conditional("ANTLR_TRACE")]
	protected virtual void LeaveRule(string ruleName, int ruleIndex) {}

    

	

	#region DFA
	 dfa;}; separator="\n">

	protected override void InitDFAs()
	{
		base.InitDFAs();
		 = new DFA(this, SpecialStateTransition);}; separator="\n">
	}

	 
	#endregion

}
>>

/** An override of Lexer.NextToken() that backtracks over mTokens() looking
 *  for matches. No error can be reported on failure; just rewind, consume
 *  a character and try again. backtracking needs to be set as well.
 *  Rule memoization happens only at levels above 1, since mTokens starts
 *  at backtracking==1. (A usage sketch follows this template.)
 */
filteringNextToken() ::= <<
public override IToken NextToken()
{
	while (true)
	{
		if (input.LA(1) == CharStreamConstants.EndOfFile)
		{
			IToken eof = new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, input.Index, input.Index);
			eof.Line = Line;
			eof.CharPositionInLine = CharPositionInLine;
			return eof;
		}
		state.token = null;
		state.channel = TokenChannels.Default;
		state.tokenStartCharIndex = input.Index;
		state.tokenStartCharPositionInLine = input.CharPositionInLine;
		state.tokenStartLine = input.Line;
		state.text = null;
		try
		{
			int m = input.Mark();
			state.backtracking=1;
			state.failed=false;
			mTokens();
			state.backtracking=0;
			
			if (state.failed)
			{
				input.Rewind(m);
				input.Consume();
			}
			else
			{
				Emit();
				return state.token;
			}
		}
		catch (RecognitionException re)
		{
			// shouldn't happen in backtracking mode, but...
			ReportError(re);
			Recover(re);
		}
	}
}

public override void Memoize(IIntStream input, int ruleIndex, int ruleStartIndex)
{
	if (state.backtracking > 1)
		base.Memoize(input, ruleIndex, ruleStartIndex);
}

public override bool AlreadyParsedRule(IIntStream input, int ruleIndex)
{
	if (state.backtracking > 1)
		return base.AlreadyParsedRule(input, ruleIndex);

	return false;
}
>>
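
/*
 * Usage sketch (editorial): how the filtering NextToken() behaves from a caller's
 * point of view. "FuzzyLexer" is an assumed name for a filter-mode lexer generated
 * from this template; input that matches no rule is silently consumed and skipped.
 *
 *     using Antlr.Runtime;
 *
 *     static class FilterDemo
 *     {
 *         static void Main()
 *         {
 *             ICharStream input = new ANTLRStringStream("noise 42 noise id noise");
 *             var lexer = new FuzzyLexer(input);   // assumed generated lexer
 *             for (IToken tok = lexer.NextToken();
 *                  tok.Type != CharStreamConstants.EndOfFile;
 *                  tok = lexer.NextToken())
 *             {
 *                 // Only tokens from successful rule matches come back; failed
 *                 // attempts are rewound and one character is consumed instead.
 *                 System.Console.WriteLine("{0} '{1}'", tok.Type, tok.Text);
 *             }
 *         }
 *     }
 */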

actionGate() ::= "state.backtracking == 0"

filteringActionGate() ::= "state.backtracking == 1"

/** How to generate a parser */
genericParser(grammar, name, scopes, tokens, tokenNames, rules, numRules,
              bitsets, inputStreamType, superClass,
              labelType, members, rewriteElementType,
              filterMode, ASTLabelType="object") ::= <<
[System.CodeDom.Compiler.GeneratedCode("ANTLR", "<ANTLRVersion>")]
[System.CLSCompliant(false)]
 partial class  : <@superClassName><@end>
{

	internal static readonly string[] tokenNames = new string[] {
		"\<invalid>", "\<EOR>", "\<DOWN>", "\<UP>", <tokenNames; separator=", ">
	};

	=;}; separator="\n">


	// delegates
	 ;}; separator="\n">


	// delegators
	 ;}; separator="\n">
	 gParent;}>



	public override void SetState(RecognizerSharedState state)
	{
		base.SetState(state);
		.SetState(state);}; separator="\n">
	}


	public override void SetTreeNodeStream(ITreeNodeStream input)
	{
		base.SetTreeNodeStream(input);
		.SetTreeNodeStream(input);}; separator="\n">
	}



	}>
	<@members()>

	public override string[] TokenNames { get { return .tokenNames; } }
	public override string GrammarFileName { get { return "<fileName>"; } }

	

	[Conditional("ANTLR_TRACE")]
	protected virtual void OnCreated() {}
	[Conditional("ANTLR_TRACE")]
	protected virtual void EnterRule(string ruleName, int ruleIndex) {}
	[Conditional("ANTLR_TRACE")]
	protected virtual void LeaveRule(string ruleName, int ruleIndex) {}

	#region Rules
	
	#endregion Rules



	#region Delegated rules
  () { return .(}; separator=", ">); \}}; separator="\n">
	#endregion Delegated rules


	


	#region DFA
	 dfa;}; separator="\n">

	protected override void InitDFAs()
	{
		base.InitDFAs();
		 = new DFA( this, SpecialStateTransition );}; separator="\n">
	}

	
	#endregion DFA



	#region Follow sets
	private static class Follow
	{
		_in_}, words64=it.bits)>}; separator="\n">
	}
	#endregion Follow sets

}
>>

@genericParser.members() ::= <<
#if ANTLR_DEBUG
private static readonly bool[] decisionCanBacktrack =
	new bool[]
	{
		false, // invalid decision
		}; wrap="\n", separator=", ">
	};
#else
private static readonly bool[] decisionCanBacktrack = new bool[0];
#endif

 ( input }>)
	: this(input, new RecognizerSharedState()}>)
{
}
 ( input, RecognizerSharedState state }>)
	: base(input, state)
{

	 = new (input, state}>, this);}; separator="\n">


	 = .;}; separator="\n">


	;}>

	
	OnCreated();
}
>>

// imported grammars are 'public' (can't be internal because their return scope classes must be accessible)
parserModifier(grammar, actions) ::= <<
public
>>

parserCtorBody() ::= <<


this.state.ruleMemo = new System.Collections.Generic.Dictionary\[+1];<\n>


 = ;}; separator="\n">
>>

parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets,
       ASTLabelType="object", superClass={Antlr.Runtime.Parser}, labelType="IToken",
       members={}) ::= <<

>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules,
           numRules, bitsets, filterMode, labelType={}, ASTLabelType="object",
           superClass={Antlr.Runtime.Tree.TreeRewriterTreeFilterTreeParser},
           members={}) ::= <<

>>

/** A simpler version of the rule template, specific to the imaginary rules
 *  created for syntactic predicates.  Since they never have return values,
 *  parameters, etc., just emit the simplest possible method.  Skip the normal
 *  memoization machinery here as well; it would be wasted effort.  Because
 *  predicates cannot be inlined into the invoking rule, each one needs a rule
 *  of its own. (A sketch of the emitted fragment/wrapper pair follows
 *  insertSynpreds() below.)
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::=
<<
[Conditional("ANTLR_TRACE")]
protected virtual void EnterRule__fragment() {}
[Conditional("ANTLR_TRACE")]
protected virtual void LeaveRule__fragment() {}

// $ANTLR start 
 void _fragment()
{
	
	EnterRule__fragment();
	EnterRule("_fragment", );
	TraceIn("_fragment", );
	try
	{
		
	}
	finally
	{
		TraceOut("_fragment", );
		LeaveRule("_fragment", );
		LeaveRule__fragment();
	}
}
// $ANTLR end 
>>

insertLexerSynpreds(synpreds) ::= <<

>>

insertSynpreds(synpreds) ::= <<

#region Synpreds
private bool EvaluatePredicate(System.Action fragment)
{
	bool success = false;
	state.backtracking++;
	<@start()>
	try { DebugBeginBacktrack(state.backtracking);
	int start = input.Mark();
	try
	{
		fragment();
	}
	catch ( RecognitionException re )
	{
		System.Console.Error.WriteLine("impossible: "+re);
	}
	success = !state.failed;
	input.Rewind(start);
	} finally { DebugEndBacktrack(state.backtracking, success); }
	<@stop()>
	state.backtracking--;
	state.failed=false;
	return success;
}
#endregion Synpreds

>>
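
/*
 * Illustrative sketch (editorial, names assumed): for each syntactic predicate the
 * generator emits a *_fragment method from synpredRule() plus a boolean wrapper
 * that goes through EvaluatePredicate(), roughly:
 *
 *     private void synpred1_T_fragment()
 *     {
 *         // body generated from the predicate's block
 *     }
 *
 *     private bool synpred1_T()
 *     {
 *         return EvaluatePredicate(synpred1_T_fragment);
 *     }
 *
 * EvaluatePredicate() bumps state.backtracking, marks and rewinds the input, and
 * reports success as !state.failed, so evaluating a predicate never consumes input.
 */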

ruleMemoization(name) ::= <<

if (state.backtracking > 0 && AlreadyParsedRule(input, )) {  }

>>

/** How to test for failure and return from rule */
checkRuleBacktrackFailure() ::= <<
if (state.failed) 
>>

/** This rule has failed, exit indicating failure during backtrack */
ruleBacktrackFailure() ::= <<
if (state.backtracking>0) {state.failed=true; }
>>
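
/*
 * Illustrative sketch (editorial): inside a generated rule these two templates act
 * as the failure protocol for backtracking. A hypothetical token match expands to
 * roughly (names assumed):
 *
 *     Match(input, ID, Follow._ID_in_decl);
 *     if (state.failed) return retval;                 // checkRuleBacktrackFailure()
 *
 * and when a mismatch is detected while a predicate is being evaluated:
 *
 *     if (state.backtracking > 0) { state.failed = true; return retval; }   // ruleBacktrackFailure()
 *
 * so errors are signalled through state.failed instead of being reported while
 * backtracking.
 */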

ruleWrapperMap ::= [
	"bottomup":{},
	"topdown":{},
	default:""
]

ruleWrapperBottomup() ::= <<

protected override IAstRuleReturnScopevoid Bottomup() { return bottomup(); }

>>

ruleWrapperTopdown() ::= <<

protected override IAstRuleReturnScopevoid Topdown() { return topdown(); }

>>

/** How to generate code for a rule.  This includes any return-type data
 *  aggregates required for multiple return values. (A sketch of the emitted
 *  method shape follows this template.)
 */
rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::= <<



[Conditional("ANTLR_TRACE")]
protected virtual void EnterRule_() {}
[Conditional("ANTLR_TRACE")]
protected virtual void LeaveRule_() {}

// $ANTLR start ""
// :
[GrammarRule("")]
  ()
{
	EnterRule_();
	EnterRule("", );
	TraceIn("", );
    
    
    
    
	try { DebugEnterRule(GrammarFileName, "");
	DebugLocation(, );
	<@preamble()>
	try
	{
		
		
		
		<(ruleDescriptor.actions.after):execAction()>
	}

	<\n>}>



	

	catch (RecognitionException re)
	{
		ReportError(re);
		Recover(input,re);
	<@setErrorReturnValue()>
	}



	finally
	{
		TraceOut("", );
		LeaveRule("", );
		LeaveRule_();
        
        
        
    }
 	DebugLocation(, );
	} finally { DebugExitRule(GrammarFileName, ""); }
	<@postamble()>
	<\n>
}
// $ANTLR end ""
>>
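
/*
 * Illustrative sketch (editorial): a generated parser rule method follows the shape
 * laid out above. For a hypothetical rule "decl" (return-scope name, rule index and
 * trace arguments are assumptions), it reads roughly:
 *
 *     [GrammarRule("decl")]
 *     private decl_return decl()
 *     {
 *         EnterRule_decl();
 *         EnterRule("decl", 3);
 *         TraceIn("decl", 3);
 *         decl_return retval = new decl_return();
 *         retval.Start = (IToken)input.LT(1);
 *         try
 *         {
 *             // alternatives matched here
 *         }
 *         catch (RecognitionException re)
 *         {
 *             ReportError(re);
 *             Recover(input, re);
 *         }
 *         finally
 *         {
 *             TraceOut("decl", 3);
 *             LeaveRule("decl", 3);
 *             LeaveRule_decl();
 *         }
 *         return retval;
 *     }
 */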

// imported grammars need to have internal rules
ruleModifier(grammar,ruleDescriptor) ::= <<
internal
>>

// imported grammars need to have public return scopes
returnScopeModifier(grammar,ruleDescriptor) ::= <<
public
>>

catch(decl,action) ::= <<
catch ()
{
	
}
>>

ruleDeclarations() ::= <<

 retval = new ();
retval.Start = ()input.LT(1);

  = ;
}>


int _StartIndex = input.Index;

>>

ruleScopeSetUp() ::= <<
_stack.Push(new _scope());_scopeInit(_stack.Peek());}; separator="\n">
_stack.Push(new _scope());_scopeInit(_stack.Peek());}; separator="\n">
>>

ruleScopeCleanUp() ::= <<
_scopeAfter(_stack.Peek());_stack.Pop();}; separator="\n">
_scopeAfter(_stack.Peek());_stack.Pop();}; separator="\n">
>>

ruleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels,ruleDescriptor.wildcardTreeLabels,ruleDescriptor.wildcardTreeListLabels]
    :{it|  = default();}; separator="\n"
>
\> list_ = null;}; separator="\n"
>
<[ruleDescriptor.ruleListLabels,ruleDescriptor.wildcardTreeListLabels]
    :{it|List\<\> list_ = null;}; separator="\n"
>


>>

lexerRuleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{it|  = default();}; separator="\n"
>
<[ruleDescriptor.charListLabels,
  ruleDescriptor.charLabels]
	:{it|int  = 0;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels]
    :{it|List\<\> list_ = null;}; separator="\n"
>
 list_ = null;}; separator="\n"
>
>>

returnFromRule() ::= <%
return



 

 retval



;
%>

ruleCleanUp() ::= <<


retval.Stop = ()input.LT(-1);


>>

memoize() ::= <<


if (state.backtracking > 0) { Memoize(input, , _StartIndex); }


>>
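
/*
 * Illustrative sketch (editorial, rule index assumed): when memoization is enabled,
 * ruleMemoization() and memoize() bracket the rule body roughly like this:
 *
 *     int decl_StartIndex = input.Index;                         // ruleDeclarations()
 *     if (state.backtracking > 0 && AlreadyParsedRule(input, 3))
 *         return retval;                                         // ruleMemoization()
 *     try
 *     {
 *         // rule body
 *     }
 *     finally
 *     {
 *         if (state.backtracking > 0) { Memoize(input, 3, decl_StartIndex); }   // memoize()
 *     }
 */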

/** How to generate a rule in the lexer; naked blocks are used for
 *  fragment rules. (A sketch of the emitted method follows this template.)
 */
lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::= <<

[Conditional("ANTLR_TRACE")]
protected virtual void EnterRule_() {}
[Conditional("ANTLR_TRACE")]
protected virtual void LeaveRule_() {}

// $ANTLR start ""
[GrammarRule("")]
 void m()
{
	EnterRule_();
	EnterRule("", );
	TraceIn("", );
    
    
		try
		{

		
		
		
		

		int _type = ;
		int _channel = DefaultTokenChannel;
		
		
		
		
		
		state.type = _type;
		state.channel = _channel;
		<(ruleDescriptor.actions.after):execAction()>

	}
	finally
	{
		TraceOut("", );
		LeaveRule("", );
		LeaveRule_();
        
        
    }
}
// $ANTLR end ""
>>
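
/*
 * Illustrative sketch (editorial): for a hypothetical non-fragment lexer rule "ID"
 * (token-type constant and trace index assumed), the emitted method reads roughly:
 *
 *     [GrammarRule("ID")]
 *     private void mID()
 *     {
 *         EnterRule_ID();
 *         EnterRule("ID", 5);
 *         TraceIn("ID", 5);
 *         try
 *         {
 *             int _type = ID;
 *             int _channel = DefaultTokenChannel;
 *             // match the rule's block here
 *             state.type = _type;
 *             state.channel = _channel;
 *         }
 *         finally
 *         {
 *             TraceOut("ID", 5);
 *             LeaveRule("ID", 5);
 *             LeaveRule_ID();
 *         }
 *     }
 *
 * Fragment rules (nakedBlock) skip the _type/_channel bookkeeping and only match
 * their block.
 */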

/** How to generate code for the implicitly-defined lexer grammar rule
 *  that chooses between lexer rules.
 */
tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::= <<

public override void mTokens()
{
	<\n>
}
>>

// S U B R U L E S

/** A (...) subrule with multiple alternatives */
block(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int alt=;

<@predecision()>
try { DebugEnterSubRule();
try { DebugEnterDecision(, decisionCanBacktrack[]);

} finally { DebugExitDecision(); }
<@postdecision()>
<@prebranch()>
switch (alt)
{
}>
}
} finally { DebugExitSubRule(); }
<@postbranch()>
>>

/** A rule block with multiple alternatives */
ruleBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int alt=;

<@predecision()>
try { DebugEnterDecision(, decisionCanBacktrack[]);

} finally { DebugExitDecision(); }
<@postdecision()>
switch (alt)
{
}>
}
>>

ruleBlockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// :

<@prealt()>
DebugEnterAlt(1);

<@postalt()>
>>

/** A special case of a (...) subrule with a single alternative */
blockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// :

<@prealt()>
DebugEnterAlt(1);

<@postalt()>
>>

/** A (..)+ block with 1 or more alternatives (a filled-in sketch follows this template) */
positiveClosureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int cnt=0;

<@preloop()>
try { DebugEnterSubRule();
while (true)
{
	int alt=;
	<@predecision()>
	try { DebugEnterDecision(, decisionCanBacktrack[]);
	
	} finally { DebugExitDecision(); }
	<@postdecision()>
	switch (alt)
	{
	}>
	default:
		if (cnt >= 1)
			goto loop;

		
		EarlyExitException eee = new EarlyExitException( , input );
		DebugRecognitionException(eee);
		<@earlyExitException()>
		throw eee;
	}
	cnt++;
}
loop:
	;

} finally { DebugExitSubRule(); }
<@postloop()>
>>
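
/*
 * Illustrative sketch (editorial, decision number assumed): a (..)+ subrule becomes
 * a while(true) loop with a counted exit; filled in, it reads roughly:
 *
 *     int cnt3 = 0;
 *     while (true)
 *     {
 *         int alt3 = 2;
 *         // DFA or LL(k) prediction assigns alt3 here
 *         switch (alt3)
 *         {
 *         case 1:
 *             // match one iteration of the block
 *             break;
 *         default:
 *             if (cnt3 >= 1)
 *                 goto loop3;
 *             EarlyExitException eee = new EarlyExitException(3, input);
 *             throw eee;
 *         }
 *         cnt3++;
 *     }
 *     loop3:
 *         ;
 *
 * The (..)* template below is identical except that its default case jumps straight
 * to the loop exit instead of throwing EarlyExitException when nothing matched.
 */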

positiveClosureBlockSingleAlt ::= positiveClosureBlock

/** A (..)* block with 1 or more alternatives */
closureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :

<@preloop()>
try { DebugEnterSubRule();
while (true)
{
	int alt=;
	<@predecision()>
	try { DebugEnterDecision(, decisionCanBacktrack[]);
	
	} finally { DebugExitDecision(); }
	<@postdecision()>
	switch ( alt )
	{
	}>
	default:
		goto loop;
	}
}

loop:
	;

} finally { DebugExitSubRule(); }
<@postloop()>
>>

closureBlockSingleAlt ::= closureBlock

/** Optional blocks (x)? are translated to (x|) by ANTLR before code generation
 *  so we can just use the normal block template
 */
optionalBlock ::= block

optionalBlockSingleAlt ::= block

/** A case in a switch that jumps to an alternative given the alternative
 *  number.  A DFA predicts the alternative and then a simple switch
 *  does the jump to the code that actually matches that alternative.
 */
altSwitchCase(altNum,alt) ::= <<
case :
	<@prealt()>
	DebugEnterAlt();
	
	break;<\n>
>>

/** An alternative is just a list of elements; at outermost level */
alt(elements,altNum,description,autoAST,outerAlt,treeLevel,rew) ::= <<
// :
{
<@declarations()>


<@cleanup()>
}
>>

/** What to emit when there is no rewrite.  For auto build
 *  mode, does nothing.
 */
noRewrite(rewriteBlockLevel, treeLevel) ::= ""

// E L E M E N T S

/** Dump the elements one per line */
element(it) ::= <%
<@prematch()>
DebugLocation(, );<\n>
<\n>
%>

/** match a token optionally with a label in front */
tokenRef(token,label,elementIndex,terminalOptions={}) ::= <<