/*
 [The "BSD licence"]
 Copyright (c) 2007-2008 Johannes Luber
 Copyright (c) 2005-2007 Kunle Odutola
 Copyright (c) 2005 Terence Parr
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:
 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.
 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.
 3. The name of the author may not be used to endorse or promote products
    derived from this software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
group CSharp implements ANTLRCore;

csharpTypeInitMap ::= [
	"int":"0",
	"uint":"0",
	"long":"0",
	"ulong":"0",
	"float":"0.0",
	"double":"0.0",
	"bool":"false",
	"byte":"0",
	"sbyte":"0",
	"short":"0",
	"ushort":"0",
	"char":"char.MinValue",
	default:"null" // anything other than an atomic type
]

/** The overall file structure of a recognizer; stores methods for rules
 *  and cyclic DFAs plus support code.
 */
outputFile(LEXER,PARSER,TREE_PARSER, actionScope, actions, docComment, recognizer,
           name, tokens, tokenNames, rules, cyclicDFAs,
           bitsets, buildTemplate, buildAST, rewriteMode, profile,
           backtracking, synpreds, memoize, numRules,
           fileName, ANTLRVersion, generatedTimestamp, trace,
           scopes, superClass, literals) ::= <<
// $ANTLR
namespace {

<@imports>
using System;
using Antlr.Runtime;
using Antlr.Runtime.Tree;
using IList       = System.Collections.IList;
using ArrayList   = System.Collections.ArrayList;
using Stack       = Antlr.Runtime.Collections.StackList;
using IDictionary = System.Collections.IDictionary;
using Hashtable   = System.Collections.Hashtable;
<@end>

}
>>

lexer(grammar, name, tokens, scopes, rules, numRules, labelType="IToken",
      filterMode, superClass="Lexer") ::= <<
public class : <@superClassName><@end> {
    = ;}; separator="\n"> }>
    // delegates
    ;}; separator="\n">
    // delegators
    ;}; separator="\n">
    gParent;}>

    public () {
        InitializeCyclicDFAs();
    }
    public (ICharStream input }>)
        : this(input, null}>) {
    }
    public (ICharStream input, RecognizerSharedState state }>)
        : base(input, state) {
        InitializeCyclicDFAs();
        state.ruleMemo = new Hashtable[+1];<\n>
        = new (input, state}>, this);}; separator="\n">
        = ;}; separator="\n">
        ;}>
    }

    override public string GrammarFileName {
        get { return "";}
    }

    }>
    dfa;}; separator="\n">

    private void InitializeCyclicDFAs() {
        = new DFA(this<@debugAddition()>);}; separator="\n">
        this.dfa.specialStateTransitionHandler = new DFA.SpecialStateTransitionHandler(DFA_SpecialStateTransition);}; separator="\n">
    }
}
>>
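/* Illustrative usage sketch (not part of the original template group): the
 * constructors emitted by the lexer template above are normally invoked from
 * client code through the Antlr.Runtime stream classes.  Assuming a
 * hypothetical grammar named "T", that looks roughly like:
 *
 *     ICharStream input = new ANTLRStringStream("some input text");
 *     TLexer lexer = new TLexer(input);                  // ICharStream ctor above
 *     CommonTokenStream tokens = new CommonTokenStream(lexer);
 *
 * "T" and the input literal are placeholders; the real class name is derived
 * from the grammar name, and the token stream pulls tokens lazily through the
 * lexer's NextToken().
 */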
/** An override of Lexer.nextToken() that backtracks over mTokens() looking
 *  for matches.  No error can be reported when a match fails; just rewind,
 *  consume a token and then try again.  The backtracking level needs to be
 *  set as well.
 *
 *  Make rule memoization happen only at levels above 1 as we start mTokens
 *  at backtracking==1.
 */
filteringNextToken() ::= <<
override public IToken NextToken() {
    while (true) {
        if ( input.LA(1) == (int)CharStreamConstants.EOF ) {
            return Token.EOF_TOKEN;
        }
        state.token = null;
        state.channel = Token.DEFAULT_CHANNEL;
        state.tokenStartCharIndex = input.Index();
        state.tokenStartCharPositionInLine = input.CharPositionInLine;
        state.tokenStartLine = input.Line;
        state.text = null;
        try {
            int m = input.Mark();
            state.backtracking = 1;
            state.failed = false;
            mTokens();
            state.backtracking = 0;
            if ( state.failed ) {
                input.Rewind(m);
                input.Consume();
            }
            else {
                Emit();
                return state.token;
            }
        }
        catch (RecognitionException re) {
            // shouldn't happen in backtracking mode, but...
            ReportError(re);
            Recover(re);
        }
    }
}

override public void Memoize(IIntStream input, int ruleIndex, int ruleStartIndex) {
    if ( state.backtracking > 1 )
        base.Memoize(input, ruleIndex, ruleStartIndex);
}

override public bool AlreadyParsedRule(IIntStream input, int ruleIndex) {
    if ( state.backtracking > 1 )
        return base.AlreadyParsedRule(input, ruleIndex);
    return false;
}
>>

filteringActionGate() ::= "(state.backtracking == 1)"

/** How to generate a parser */
genericParser(grammar, name, scopes, tokens, tokenNames, rules, numRules,
              bitsets, inputStreamType, superClass, ASTLabelType="object",
              labelType, members, rewriteElementType) ::= <<
public class : <@superClassName><@end> {
    public static readonly string[] tokenNames = new string[] {
        "\", "\", "\", "\",
    };<\n>
    = ;}; separator="\n">
    // delegates
    ;}; separator="\n">
    // delegators
    ;}; separator="\n">
    gParent;}>

    }>
    <@members>
    public ( input }>)
        : this(input, new RecognizerSharedState()}>) {
    }
    public ( input, RecognizerSharedState state }>)
        : base(input, state) {
        InitializeCyclicDFAs();
        = new (input, state}>, this);}; separator="\n">
        = .;}; separator="\n">
        ;}>
    }
    <@end>

    override public string[] TokenNames {
        get { return .tokenNames; }
    }

    override public string GrammarFileName {
        get { return ""; }
    }

    // Delegated rules
    () // throws RecognitionException \{ return .(}; separator=", ">); \}}; separator="\n">

    }>
    dfa;}; separator="\n">

    private void InitializeCyclicDFAs() {
        = new DFA(this);}; separator="\n">
        this.dfa.specialStateTransitionHandler = new DFA.SpecialStateTransitionHandler(DFA_SpecialStateTransition);}; separator="\n">
    }

    _in_}, words64=it.bits)>
}
>>

parserCtorBody() ::= <<
this.state.ruleMemo = new Hashtable[+1];<\n>
= ;}; separator="\n">
>>

parser(grammar, name, scopes, tokens, tokenNames, rules, numRules, bitsets,
       ASTLabelType, superClass="Parser", labelType="IToken", members={}) ::= <<
>>

/** How to generate a tree parser; same as parser except the input
 *  stream is a different type.
 */
treeParser(grammar, name, scopes, tokens, tokenNames, globalAction, rules,
           numRules, bitsets, labelType={}, ASTLabelType="object",
           superClass="TreeParser", members={}) ::= <<
>>
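/* Illustrative usage sketch (not part of the original template group): a
 * parser class produced by the genericParser/parser templates above is
 * typically wired to the generated lexer like this, assuming a hypothetical
 * grammar "T" with a start rule "prog" (both names are placeholders):
 *
 *     ICharStream input = new ANTLRStringStream("1+2");
 *     TLexer lexer = new TLexer(input);
 *     CommonTokenStream tokens = new CommonTokenStream(lexer);
 *     TParser parser = new TParser(tokens);    // ctor generated above
 *     parser.prog();                           // method built by the rule template below
 *
 * A tree parser produced by the treeParser template is driven the same way,
 * except its input is a tree node stream (e.g. CommonTreeNodeStream) rather
 * than a token stream.
 */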
/** A simpler version of a rule template that is specific to the imaginary
 *  rules created for syntactic predicates.  As they never have return values
 *  nor parameters etc..., just give the simplest possible method.  Don't do
 *  any of the normal memoization stuff in here either; it's a waste.
 *  As predicates cannot be inlined into the invoking rule, they need to
 *  be in a rule by themselves.
 */
synpredRule(ruleName, ruleDescriptor, block, description, nakedBlock) ::= <<
// $ANTLR start ""
public void _fragment() //throws RecognitionException
{
    TraceIn("_fragment", );
    try {
    }
    finally {
        TraceOut("_fragment", );
    }
}
// $ANTLR end ""
>>

synpredDecls(name) ::= <<
SynPredPointer ;<\n>
>>

synpred(name) ::= <<
public bool () {
    state.backtracking++;
    <@start()>
    int start = input.Mark();
    try {
        _fragment(); // can never throw exception
    }
    catch (RecognitionException re) {
        Console.Error.WriteLine("impossible: "+re);
    }
    bool success = !state.failed;
    input.Rewind(start);
    <@stop()>
    state.backtracking--;
    state.failed = false;
    return success;
}<\n>
>>

lexerSynpred(name) ::= <<
>>

ruleMemoization(name) ::= <<
if ( (state.backtracking > 0) && AlreadyParsedRule(input, ) ) { return ; }
>>

/** How to test for failure and return from rule */
checkRuleBacktrackFailure() ::= <<
if (state.failed) return ;
>>

/** This rule has failed, exit indicating failure during backtrack */
ruleBacktrackFailure() ::= <<
if ( state.backtracking > 0 ) {state.failed = true; return ;}
>>

/** How to generate code for a rule.  This includes any return type
 *  data aggregates required for multiple return values.
 */
rule(ruleName,ruleDescriptor,block,emptyRule,description,exceptions,finally,memoize) ::= <<
// $ANTLR start ""
// :
public () // throws RecognitionException [1]
{
    TraceIn("", );
    <@preamble()>
    try {
        <(ruleDescriptor.actions.after):execAction()>
    }
    <\n>}>
    catch (RecognitionException re) {
        ReportError(re);
        Recover(input,re);
        <@setErrorReturnValue()>
    }<\n>
    finally {
        TraceOut("", );
    }
    <@postamble()>
    return ;
}
// $ANTLR end ""
>>

catch(decl,action) ::= <<
catch () {
}
>>

ruleDeclarations() ::= <<
retval = new ();
retval.Start = input.LT(1);<\n>
= ; }>
int _StartIndex = input.Index();
>>

ruleScopeSetUp() ::= <<
_stack.Push(new _scope());}; separator="\n">
_stack.Push(new _scope());}; separator="\n">
>>

ruleScopeCleanUp() ::= <<
_stack.Pop();}; separator="\n">
_stack.Pop();}; separator="\n">
>>

ruleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels,ruleDescriptor.tokenListLabels]
    :{ = null;}; separator="\n"
>
<[ruleDescriptor.tokenListLabels,ruleDescriptor.ruleListLabels]
    :{IList list_ = null;}; separator="\n"
>
= null;}; separator="\n">
>>

lexerRuleLabelDefs() ::= <<
<[ruleDescriptor.tokenLabels, ruleDescriptor.tokenListLabels, ruleDescriptor.ruleLabels]
    :{ = null;}; separator="\n"
>
;}; separator="\n">
<[ruleDescriptor.tokenListLabels, ruleDescriptor.ruleListLabels, ruleDescriptor.ruleListLabels]
    :{IList list_ = null;}; separator="\n"
>
>>

ruleReturnValue() ::= <<
retval
>>

ruleCleanUp() ::= <<
retval.Stop = input.LT(-1);<\n>
>>

memoize() ::= <<
if ( state.backtracking > 0 ) {
    Memoize(input, , _StartIndex);
}
>>

/** How to generate a rule in the lexer; naked blocks are used for
 *  fragment rules.
 */
lexerRule(ruleName,nakedBlock,ruleDescriptor,block,memoize) ::= <<
// $ANTLR start ""
public void m() // throws RecognitionException [2]
{
    TraceIn("", );
    try {
        <\n>
        int _type = ;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        state.type = _type;
        state.channel = _channel;
        <(ruleDescriptor.actions.after):execAction()>
    }
    finally {
        TraceOut("", );
    }
}
// $ANTLR end ""
>>

/** How to generate code for the implicitly-defined lexer grammar rule
 *  that chooses between lexer rules.
 */
tokensRule(ruleName,nakedBlock,args,block,ruleDescriptor) ::= <<
override public void mTokens() // throws RecognitionException
{
    <\n>
}
>>
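/* Illustrative sketch (not part of the original template group): for a
 * hypothetical parser rule "expr" with rule index 5, the rule,
 * ruleMemoization, checkRuleBacktrackFailure and memoize templates above
 * compose into a generated method of roughly this shape when backtracking
 * and memoization are enabled:
 *
 *     public void expr() // throws RecognitionException [1]
 *     {
 *         int expr_StartIndex = input.Index();
 *         try {
 *             if ( (state.backtracking > 0) && AlreadyParsedRule(input, 5) ) { return; }
 *             // ...code matching the rule body; after each sub-match:
 *             if (state.failed) return;
 *         }
 *         finally {
 *             if ( state.backtracking > 0 ) { Memoize(input, 5, expr_StartIndex); }
 *         }
 *     }
 *
 * The rule name, index and void return type are placeholders; the exact
 * output depends on the rule's return values and options.
 */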
// S U B R U L E S

/** A (...) subrule with multiple alternatives */
block(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int alt = ;
<@predecision()>
<@postdecision()>
<@prebranch()>
switch (alt) {
}
<@postbranch()>
>>

/** A rule block with multiple alternatives */
ruleBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int alt = ;
<@predecision()>
<@postdecision()>
switch (alt) {
}
>>

ruleBlockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// :
<@prealt()>
<@postalt()>
>>

/** A special case of a (...) subrule with a single alternative */
blockSingleAlt(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,description) ::= <<
// :
<@prealt()>
<@postalt()>
>>

/** A (..)+ block with 1 or more alternatives */
positiveClosureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
int cnt = 0;
<@preloop()>
do {
    int alt = ;
    <@predecision()>
    <@postdecision()>
    switch (alt) {
        default:
            if ( cnt >= 1 ) goto loop;
            EarlyExitException eee = new EarlyExitException(, input);
            <@earlyExitException()>
            throw eee;
    }
    cnt++;
} while (true);
loop:
    ;   // Stops C# compiler whinging that label 'loop' has no statements
<@postloop()>
>>

positiveClosureBlockSingleAlt ::= positiveClosureBlock

/** A (..)* block with 1 or more alternatives */
closureBlock(alts,decls,decision,enclosingBlockLevel,blockLevel,decisionNumber,maxK,maxAlt,description) ::= <<
// :
<@preloop()>
do {
    int alt = ;
    <@predecision()>
    <@postdecision()>
    switch (alt) {
        default:
            goto loop;
    }
} while (true);
loop:
    ;   // Stops C# compiler whining that label 'loop' has no statements
<@postloop()>
>>

closureBlockSingleAlt ::= closureBlock

/** Optional blocks (x)? are translated to (x|) by ANTLR before code generation
 *  so we can just use the normal block template
 */
optionalBlock ::= block

optionalBlockSingleAlt ::= block

/** A case in a switch that jumps to an alternative given the alternative
 *  number.  A DFA predicts the alternative and then a simple switch
 *  does the jump to the code that actually matches that alternative.
 */
altSwitchCase() ::= <<
case :
    <@prealt()>
    break;<\n>
>>

/** An alternative is just a list of elements; at outermost level */
alt(elements,altNum,description,autoAST,outerAlt,treeLevel,rew) ::= <<
// :
{
    <@declarations()>
    <@cleanup()>
}
>>

/** What to emit when there is no rewrite.  For auto build
 *  mode, does nothing.
 */
noRewrite(rewriteBlockLevel, treeLevel) ::= ""

// E L E M E N T S

/** Dump the elements one per line */
element() ::= <<
<@prematch()>
<\n>
>>

/** match a token optionally with a label in front */
tokenRef(token,label,elementIndex,hetero) ::= <<