org.cassandraunit.shaded.org.antlr.codegen.templates.Delphi.Delphi.stg

/* [The "BSD license"]
 Copyright (c) 2008 Erik van Bilsen
 Copyright (c) 2007-2008 Johannes Luber
 Copyright (c) 2005-2007 Kunle Odutola
 Copyright (c) 2005-2006 Terence Parr
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:
 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
 3. The name of the author may not be used to endorse or promote products
    derived from this software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
group Delphi;

csharpTypeInitMap ::= [
  "int":"0",
  "uint":"0",
  "long":"0",
  "ulong":"0",
  "float":"0.0",
  "double":"0.0",
  "bool":"False",
  "byte":"0",
  "sbyte":"0",
  "short":"0",
  "ushort":"0",
  "char":"#0",
  "string":"''",
  "String":"''",
  default:"nil" // anything other than an atomic type
]
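
/* Illustrative sketch (not part of the template): this map pairs a C#-style type name
   with the Delphi literal used to default-initialize values of that type in generated
   code; anything that is not an atomic type falls through to "nil". Hypothetical
   initializations:

     Count := 0;      // "int"
     Ratio := 0.0;    // "double"
     Name  := '';     // "string"
     Node  := nil;    // default (interface/object types)
*/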

/** The overall file structure of a recognizer; stores methods for rules
 *  and cyclic DFAs plus support code.
 *  LEXER (Boolean): should we generate lexer code?
 *  PARSER (Boolean): should we generate parser code?
 *  TREE_PARSER (Boolean): should we generate tree parser code?
 *  actionScope (String): 'lexer', 'parser', 'tree_parser' or custom scope
 *  actions (HashMap):
 *  docComment (String): document comment
 *  recognizer (Object): recognizer class generator
 *  name (String): name of grammar
 *  tokens (HashMap):
 *  tokenNames:
 *  rules:
 *  cyclicDFAs:
 *  bitsets:
 *  buildTemplate (Boolean): should we generate a string template?
 *  buildAST (Boolean): should we generate an AST?
 *  rewriteMode (Boolean): are we rewriting nodes?
 *  profile (Boolean):
 *  backtracking (Boolean): backtracking mode?
 *  synpreds (): syntactic predicates
 *  memoize (Boolean): should we memoize?
 *  numRules (Integer): number of rules
 *  fileName (String): fully qualified name of original .g file
 *  ANTLRVersion (String): ANTLR version in Major.Minor.Build format
 *  generatedTimestamp (String): date/time when the file is generated
 *  trace (Boolean): should we trace input/output?
 *  scopes:
 *  superClass (String): name of base class, or empty string
 *  literals:
 */
outputFile(LEXER,PARSER,TREE_PARSER, actionScope, actions,
           docComment, recognizer,
           name, tokens, tokenNames, rules, cyclicDFAs,
     bitsets, buildTemplate, buildAST, rewriteMode, profile,
     backtracking, synpreds, memoize, numRules,
     fileName, ANTLRVersion, generatedTimestamp, trace,
     scopes, superClass, literals) ::=
<<
unit ;

{$HINTS OFF}

// $ANTLR   



interface

<@imports>
uses<\n>
<@end>
  

  Antlr.Runtime.Tree,<\n>

  Antlr.Runtime,
  Antlr.Runtime.Collections,
  Antlr.Runtime.Tools;



>>
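
/* Illustrative sketch, not emitted verbatim: for a hypothetical grammar "Expr", the
   outputFile template above produces a single Delphi unit shaped roughly like this
   (the exact uses clause and declarations depend on whether a lexer, parser or tree
   parser is generated):

     unit ExprParser;

     {$HINTS OFF}

     // $ANTLR <version> Expr.g <timestamp>

     interface

     uses
       Antlr.Runtime,
       Antlr.Runtime.Collections,
       Antlr.Runtime.Tools;

     type
       // recognizer interface and class declarations go here

     implementation

     // rule methods, cyclic DFAs and support code go here

     end.
*/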

/** Generates source code for the lexer class
 * grammar (Grammar object)
 */
lexer(grammar, name, tokens, scopes, rules, numRules, labelType="Token",
      filterMode, superClass="Lexer") ::= <<
type
  I = interface(I<@superClassName><@end>)
  end;

  T = class(T<@superClassName><@end>, I)
  strict private
    FCnt: array [0..] of Byte;
    FLA: array [0.., 0..255] of Integer;
    FException: ERecognitionException;
    procedure InitializeCyclicDFAs;
  
  public
    const
       = ;}; separator="\n">
    }>
  strict private
    
  public
    // delegates
    : I; {}}; separator="\n">
  public
    // delegators
    : Pointer; {}}; separator="\n">
    }}>
  protected
    { IBaseRecognizer }
    function GetGrammarFileName: String; override;

    function AlreadyParsedRule(const Input: IIntStream;
      const RuleIndex: Integer): Boolean; override;
    procedure Memoize(const Input: IIntStream; const RuleIndex,
      RuleStartIndex: Integer); override;
  protected
    { ILexer }
    function NextToken: IToken; override;<\n>

  protected
    { ILexer }
    procedure DoTokens; override;
  public
    constructor Create; overload;
    constructor Create(const AInput: ICharStream: IBaseRecognizer{}}>); overload;
    constructor Create(const AInput: ICharStream; const AState: IRecognizerSharedState: IBaseRecognizer{}}>); overload;

    }>
    }; separator="\n">
  end;

implementation

uses
  ,}; separator="\n">
  ,}; separator="\n">
  
  SysUtils,
  StrUtils,
  Math;

{ T }

constructor T.Create;
begin
  InitializeCyclicDFAs;
end;

constructor T.Create(const AInput: ICharStream: IBaseRecognizer{}}>);
begin
  Create(AInput, nil}>);
end;

constructor T.Create(const AInput: ICharStream; const AState: IRecognizerSharedState: IBaseRecognizer{}}>);
begin
  inherited Create(AInput, AState);
  InitializeCyclicDFAs; { TODO: Necessary in Delphi??? Not removed yet. }
  
  
  State.RuleMemoCount := +1;<\n> 
  
  
   := T.Create(AInput, State}>, Self);}; separator="\n">
   := Pointer(A);}; separator="\n">
  );}>
  
end;

function T.GetGrammarFileName: String;
begin
  Result := '';
end;






}>

procedure T.InitializeCyclicDFAs;
begin
   := TDFA.Create(Self<@debugAddition()>);}; separator="\n">
  FDFA.SpecialStateTransitionHandler := DFA_SpecialStateTransition;}; separator="\n">
end;

 
end.>>

lexerRuleDeclaration(rule) ::= <<
procedure m();<\n>
>>

/** An override of Lexer.NextToken() that backtracks over mTokens() looking
 *  for matches.  No error can be reported upon a mismatch; just rewind, consume
 *  a token and then try again.  Backtracking needs to be set as well.
 *
 *  Make rule memoization happen only at levels above 1 as we start mTokens
 *  at backtracking==1.
 */
filteringNextToken() ::= <<
function T.NextToken: IToken;
var
  M: Integer;
begin
  while (True) do
  begin
    if (Input.LA(1) = Integer(cscEOF)) then
      Exit(TToken.EOF_TOKEN);

    State.Token := nil;
    State.Channel := TToken.DEFAULT_CHANNEL;
    State.TokenStartCharIndex := Input.Index;
    State.TokenStartCharPositionInLine := Input.CharPositionInLine;
    State.TokenStartLine := Input.Line;
    State.Text := '';
    try
      M := Input.Mark();
      State.Backtracking := 1; 
      State.Failed := False;
      mTokens();
      State.Backtracking := 0;

      if (State.Failed) then
      begin
        Input.Rewind(M);
        Input.Consume; 
      end
      else
      begin
        Emit;
        Exit(State.Token);
      end;
    except
      on RE: ERecognitionException do
      begin
        // shouldn't happen in backtracking mode, but...
        ReportError(RE);
        Recover(RE);
      end;
    end;
  end;
end;

function T.AlreadyParsedRule(const Input: IIntStream;
  const RuleIndex: Integer): Boolean;
begin
  if (State.Backtracking > 1) then
    Result := inherited AlreadyParsedRule(Input, RuleIndex)
  else
    Result := False;
end;

procedure T.Memoize(const Input: IIntStream; const RuleIndex,
  RuleStartIndex: Integer);
begin
  if (State.Backtracking > 1) then
    inherited Memoize(Input, RuleIndex, RuleStartIndex);
end;

>>

filteringActionGate() ::= "(State.Backtracking = 1)"
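
/* In filter mode mTokens() runs with State.Backtracking = 1 (see NextToken above), so
   embedded actions are gated on that level rather than the usual level 0. A sketch of
   how a hypothetical user action ends up wrapped:

     if (State.Backtracking = 1) then
     begin
       Writeln('matched a keyword');   // user action
     end;
*/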

/** How to generate a parser */
genericParser(grammar, name, scopes, tokens, tokenNames, rules, numRules,
              bitsets, inputStreamType, superClass, filterMode,
              ASTLabelType="ANTLRInterface", labelType, members, rewriteElementType) ::= <<
type
  }>
  I = interface(I<@superClassName><@end>)
    }>
  end;

  T = class(T<@superClassName><@end>, I)

  public
    const
      TOKEN_NAMES: array [0..+3] of String = (
        '\',
        '\',
        '\',
        '\',
        );<\n>

  public
    const
       = ;}; separator="\n">
  public
    // delegates
    : I; {}}; separator="\n">
  public
    // delegators
    : Pointer; {}}; separator="\n">
    }}>

    }>
<@members>
    
  public
    constructor Create(const AInput: : IBaseRecognizer{}}>); overload;
    constructor Create(const AInput: ; const AState: IRecognizerSharedState: IBaseRecognizer{}}>); overload;
<@end>
  protected
    { IBaseRecognizer }
    function GetTokenNames: TStringArray; override;
    function GetGrammarFileName: String; override;
  strict private
    
  }>


    // Delegated rules
    }>

    }; separator="\n">
  
  strict private
    FException: ERecognitionException;
    FLA: array [0.., 0..255] of Integer;
    FCnt: array [0..] of Byte;
    procedure InitializeCyclicDFAs;

  public
    class var
      _in_})>
  public
    class procedure InitializeBitsets; static;<\n>

  end;

implementation

uses
  ,}; separator="\n">
  ,}; separator="\n">
  
  SysUtils,
  StrUtils,
  Math;

{ T }

constructor T.Create(const AInput: : IBaseRecognizer{}}>);
begin
  Create(AInput, TRecognizerSharedState.Create}>);
end;

constructor T.Create(const AInput: ;
  const AState: IRecognizerSharedState: IBaseRecognizer{}}>);
begin
  inherited Create(AInput, AState);
  <@membersConstructor>
  <@end>
  
   := T.Create(Input, State}>, Self);}; separator="\n">
   := .;}; separator="\n">
  );}>
  }>
  }>
  
end;


}; separator="\n">
procedure T.InitializeCyclicDFAs;
begin
   := TDFA.Create(Self);}; separator="\n">
  FDFA.SpecialStateTransitionHandler := DFA_SpecialStateTransition;}; separator="\n">
end;


class procedure T.InitializeBitsets;
begin
  _in_}, words64=it.bits)>
end;


<@membersImplementation>
 <@end>

function T.GetTokenNames: TStringArray;
var
  I: Integer;
begin
  SetLength(Result,Length(T.TOKEN_NAMES));
  for I := 0 to Length(T.TOKEN_NAMES) - 1 do
    Result[I] := T.TOKEN_NAMES[I];
end;

function T.GetGrammarFileName: String;
begin
  Result := '';
end;


}>

 

initialization
  T.InitializeBitsets;<\n>

end.>>

delegatedRuleDeclaration(ruleDescriptor) ::= <<

function (): I;<\n>


function (): ;<\n>

procedure ();<\n>


>>

delegatedRuleImplementation(ruleDescriptor) ::= <<

function T.(): I;<\n>


function T.(): ;<\n>

procedure T.();<\n>


begin
  Result := T(.Implementor).(}; separator=", ">);
end;

>>
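
/* Illustrative sketch with hypothetical names: a delegated rule in the composite
   recognizer simply forwards to the delegate grammar's implementation through its
   Implementor reference, as the template above suggests:

     function TCompositeParser.Expr: IExprReturnScope;
     begin
       Result := TDelegateParser(FDelegate.Implementor).Expr;
     end;
*/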

parserCtorBody() ::= <<
InitializeCyclicDFAs;


State.RuleMemoCount := +1;<\n> 


 := Pointer(A);}; separator="\n">
>>

parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets, ASTLabelType, superClass="Parser", labelType="Token", members={}) ::= <<

>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules, numRules, bitsets, labelType={}, ASTLabelType="object", superClass="TreeParser", members={}, filterMode) ::= <<

>>

/** A simpler version of a rule template that is specific to the imaginary
 *  rules created for syntactic predicates.  As they never have return values
 *  or parameters, just generate the simplest possible method.  Don't do
 *  any of the normal memoization stuff in here either; it's a waste.
 *  As predicates cannot be inlined into the invoking rule, they need to
 *  be in a rule by themselves.
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::=
<<
// $ANTLR start ""
procedure T._fragment();
var
  Alt: array [0..] of Integer;
  
begin
  

  TraceIn('_fragment', );
  try
    
  finally
    TraceOut('_fragment', );
  end;

  

end;
// $ANTLR end ""
>>

synpredDecls(name) ::= <<
SynPredPointer ;<\n>
>>

synpred(name) ::= <<

function T.: Boolean;
var
  Start: Integer;
  Success: Boolean;
begin
  State.Backtracking := State.Backtracking + 1;
  <@start()>
  Start := Input.Mark;
  try
    _fragment(); // can never throw exception
  except
    on RE: ERecognitionException do
      WriteLn('Impossible: ' + RE.ToString);
  end;
  Success := not State.Failed;
  Input.Rewind(Start);
  <@stop()>
  State.Backtracking := State.Backtracking - 1;
  State.Failed := False;
  Result := Success;
end;<\n>
>>
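
/* Illustrative sketch: a decision that cannot be resolved by the DFA alone calls the
   Boolean wrapper above, which speculatively runs the matching _fragment procedure and
   rewinds. Hypothetical call site (names assumed):

     if SynPred1_Expr then
       Alt[1] := 1
     else
       Alt[1] := 2;
*/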

lexerSynpred(name) ::= <<

>>

lexerSynpredDeclaration(name) ::= <<
function : Boolean;
procedure _fragment;
>>

synpredDeclaration(name) ::= <<
function : Boolean;
procedure _fragment;
>>

ruleMemoization(name) ::= <<

if ((State.Backtracking > 0) and AlreadyParsedRule(Input, )) then
  Exit();

>>

/** How to test for failure and return from rule */
checkRuleBacktrackFailure() ::= <<
<\n>if (State.Failed) then Exit();<\n>
>>

/** This rule has failed, exit indicating failure during backtrack */
ruleBacktrackFailure() ::= <<
if (State.Backtracking > 0) then
begin
  State.Failed := True;
  Exit();
end;
>>
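
/* Illustrative sketch of how the three fragments above combine inside a generated rule
   body (hypothetical memoization index 12):

     // ruleMemoization: skip the body if this rule already ran at this input position
     if (State.Backtracking > 0) and AlreadyParsedRule(Input, 12) then
       Exit;

     // ruleBacktrackFailure: on a mismatch while guessing, flag failure instead of
     // reporting an error
     if (State.Backtracking > 0) then
     begin
       State.Failed := True;
       Exit;
     end;

     // checkRuleBacktrackFailure: after each sub-match
     if State.Failed then
       Exit;
*/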

genericParserRuleDeclaration(rule, ruleDescriptor) ::= <<




public

  function : I;<\n>


  function : ;<\n>

  procedure ;<\n>



>>

genericParserRuleInterface(rule, ruleDescriptor) ::= <<



function : I;<\n>


function : ;<\n>

procedure ;<\n>



>>

genericParserRuleReturnType(rule, ruleDescriptor) ::= <<



I = interface(ITreeParserRuleReturnScope)
end;<\n>


>>

/** How to generate code for a rule.  This includes any return type
 *  data aggregates required for multiple return values.
 */
rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::= <<



// $ANTLR start ""
(* : *)

function T.(): I;


function T.(): ;

procedure T.();



var

  Locals: TLocalStorage;

  RetVal: I;<\n>


  RetVal: ;<\n>



  Alt: array [0..] of Integer;
  
  
begin
  Locals.Initialize;
  try
    TraceIn('', );
    
    
    
    
    <@preamble()>
    try
      try
        
        
        
        <(ruleDescriptor.actions.after):execAction()>

        <\n>}>



        

      except
        on RE: ERecognitionException do
        begin
          ReportError(RE);
          Recover(Input,RE);
          <@setErrorReturnValue()>
        end;<\n>



      end;
    finally
      TraceOut("", );
      
      
      
    end;
    <@postamble()>
  finally
    Locals.Finalize;
  end;
  Exit();
end;
// $ANTLR end ""
>>
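
/* Illustrative sketch: stripped of return values, scopes and memoization, a generated
   parser rule method follows the shape below (hypothetical rule "expr", decision
   number 1):

     procedure TExprParser.expr;
     var
       Locals: TLocalStorage;
     begin
       Locals.Initialize;
       try
         TraceIn('expr', 1);
         try
           try
             // matching code for the rule's block goes here
           except
             on RE: ERecognitionException do
             begin
               ReportError(RE);
               Recover(Input, RE);
             end;
           end;
         finally
           TraceOut('expr', 1);
         end;
       finally
         Locals.Finalize;
       end;
     end;
*/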

catch(decl,action) ::= <<
catch ()
{
    
}
>>

ruleDeclarations() ::= <<

RetVal := T.Create;
RetVal.Start := Input.LT(1);<\n>

 := ;
}>


_StartIndex := Input.Index();

>>

ruleDeclarationVars() ::= <<


: ;
}>


_StartIndex: Integer;

>>

ruleScopeSetUp() ::= <<
Stack.Push(TScope.Create);}; separator="\n">
Stack.Push(TScope.Create);}; separator="\n">
>>

ruleScopeCleanUp() ::= <<
Stack.Pop();}; separator="\n">
Stack.Pop;}; separator="\n">
>>

ruleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels]:{ := nil;}; separator="\n">
<[ruleDescriptor.tokenListLabels,ruleDescriptor.ruleListLabels]:{list_ := nil;}; separator="\n">

 := nil;}; separator="\n">
>>

ruleLabelDefVars() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels]:{: I;}; separator="\n">
<[ruleDescriptor.tokenListLabels,ruleDescriptor.ruleListLabels]:{list_: IList\;}; separator="\n">

: ;}; separator="\n">
>>

lexerRuleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{ := nil;}; separator="\n"
>
;}; separator="\n">
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels,
  ruleDescriptor.ruleListLabels]
    :{List_ := nil;}; separator="\n"
>
>>

lexerRuleLabelDefDeclarations() ::= <<
<[ruleDescriptor.tokenLabels,
  ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleLabels]
    :{: I;}; separator="\n"
>
;}; separator="\n">
<[ruleDescriptor.tokenListLabels,
  ruleDescriptor.ruleListLabels,
  ruleDescriptor.ruleListLabels]
    :{List_: IList;}; separator="\n"
>
>>

ruleReturnValue() ::= <<





RetVal





>>

ruleCleanUp() ::= <<


RetVal.Stop := Input.LT(-1);


>>

memoize() ::= <<


if (State.Backtracking > 0) then
  Memoize(Input, , _StartIndex);


>>

/** How to generate a rule in the lexer; naked blocks are used for
 *  fragment rules.
 */
lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::= <<
// $ANTLR start ""

procedure T.m();
var
  
  Locals: TLocalStorage;
  TokenType, Channel: Integer;
  Alt: array [0..] of Integer;
  
begin
  Locals.Initialize;
  try
    
    TraceIn("", );
    
    
    try

      
      
      
      <\n>

      TokenType := ;
      Channel := DEFAULT_TOKEN_CHANNEL;
      
      
      
      
      
      State.TokenType := TokenType;
      State.Channel := Channel;
      <(ruleDescriptor.actions.after):execAction()>

    finally
      TraceOut("", );
      
      
    end;
  finally
    Locals.Finalize;
  end;
end;
// $ANTLR end ""
>>
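
/* Illustrative sketch: a non-fragment lexer rule sets the token type and channel on the
   shared state once its block has matched (hypothetical rule "ID", grammar "Expr"):

     procedure TExprLexer.mID;
     var
       TokenType, Channel: Integer;
     begin
       TokenType := ID;
       Channel := DEFAULT_TOKEN_CHANNEL;
       // matching code for the rule's block goes here
       State.TokenType := TokenType;
       State.Channel := Channel;
     end;
*/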

/** How to generate code for the implicitly-defined lexer grammar rule
 *  that chooses between lexer rules.
 */
tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::= <<
procedure T.mTokens;
var
  Alt: array [0..] of Integer;
begin
  
end;

procedure T.DoTokens;
begin
  mTokens;
end;
>>

// S U B R U L E S

/** A (...) subrule with multiple alternatives */
block(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
(* : *)
Alt[] := ;

<@predecision()>

<@postdecision()>
<@prebranch()>
case Alt[] of
  
end;
<@postbranch()>
>>

/** A rule block with multiple alternatives */
ruleBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
(* : *)
Alt[] := ;

<@predecision()>

<@postdecision()>
case Alt[] of
  
end;
>>

ruleBlockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
(* : *)

<@prealt()>

<@postalt()>
>>

/** A special case of a (...) subrule with a single alternative */
blockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
(* : *)

<@prealt()>

<@postalt()>
>>

/** A (..)+ block with 1 or more alternatives */
positiveClosureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
(* : *)
FCnt[] := 0;

<@preloop()>
while (True) do
begin
  Alt[] := ;
  <@predecision()>
  
  <@postdecision()>
  case Alt[] of
    
  else
    begin
      if (FCnt[] >= 1) then
        Break;
      
      raise EEarlyExitException.Create(, Input);
      <@earlyExitException()>
    end;
  end;
  Inc(FCnt[]);
end;
<@postloop()>
>>

positiveClosureBlockSingleAlt ::= positiveClosureBlock

/** A (..)* block with 1 or more alternatives */
closureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
(* : *)

<@preloop()>
while (True) do
begin
  Alt[] := ;
  <@predecision()>
  
  <@postdecision()>
  case Alt[] of
    
  else
    Break;
  end;
end;
<@postloop()>
>>
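
/* Illustrative sketch: a concrete ( ... )* expansion, with a hypothetical decision
   number and a simple one-token lookahead test standing in for the generated
   prediction code. The ( ... )+ form above is the same loop, except it counts
   iterations in FCnt and raises EEarlyExitException when the first iteration fails:

     while (True) do
     begin
       if (Input.LA(1) = COMMA) then    // prediction for decision 4
         Alt[4] := 1
       else
         Alt[4] := 2;
       case Alt[4] of
         1:
           begin
             // match ',' expr
           end;
       else
         Break;
       end;
     end;
*/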

closureBlockSingleAlt ::= closureBlock

/** Optional blocks (x)? are translated to (x|) before code generation,
 *  so we can just use the normal block template.
 */
optionalBlock ::= block

optionalBlockSingleAlt ::= block

/** A case in a switch that jumps to an alternative given the alternative
 *  number.  A DFA predicts the alternative and then a simple switch
 *  does the jump to the code that actually matches that alternative.
 */
altSwitchCase() ::= <<
:
  <@prealt()>
  <\n>
>>
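
/* Illustrative sketch of the predict-and-dispatch pair that altSwitchCase feeds into;
   the DFA field name and Predict call are assumptions based on the
   InitializeCyclicDFAs code above (hypothetical decision 7, two alternatives):

     Alt[7] := FDFA7.Predict(Input);
     case Alt[7] of
       1:
         begin
           // code for alternative 1
         end;
       2:
         begin
           // code for alternative 2
         end;
     end;
*/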

/** An alternative is just a list of elements; at outermost level */
alt(elements,altNum,description,autoAST,outerAlt,treeLevel,rew) ::= <<
(* : *)
begin
  <@declarations()>
  
  
  <@cleanup()>
end;
>>

/** What to emit when there is no rewrite.  For auto-build
 *  mode, this does nothing.
 */
noRewrite(rewriteBlockLevel, treeLevel) ::= ""

// E L E M E N T S

/** Dump the elements one per line */
element() ::= <<
<@prematch()>

>>

/** match a token optionally with a label in front */
tokenRef(token,label,elementIndex,terminalOptions) ::= <<