Logo Search packages:      
Sourcecode: antlr version File versions

void antlr::CSharpCodeGenerator::genNextToken (  )  [inline]

Generate the nextToken() rule. nextToken() is a synthetic lexer rule that is the implicit OR of all user-defined lexer rules.

Definition at line 2496 of file CSharpCodeGenerator.java.

References antlr::CodeGenerator::_println(), antlr::Grammar::antlrTool, antlr::Alternative::cache, antlr::Lookahead::containsEpsilon(), genBlockFinish(), genCommonBlock(), antlr::AlternativeBlock::getAlternativeAt(), antlr::AlternativeBlock::getAlternatives(), antlr::RuleBlock::getDefaultErrorHandler(), antlr::Grammar::getSymbol(), antlr::Alternative::head, antlr::Grammar::isDefined(), and antlr::CodeGenerator::println().

                                 {
            // Emits the C# nextToken() method for the generated lexer.
            // nextToken() is a synthetic rule: the implicit OR of every
            // public lexer rule, wrapped in char-stream / lexical error
            // handling and (in filter mode) mark/rewind recovery logic.
            // All println()/_println() calls below write generated C#
            // source text; 'tabs' tracks the emitted indentation level.

            // Are there any public rules?  If not, then just generate a
            // fake nextToken().
            boolean hasPublicRules = false;
            for (int i = 0; i < grammar.rules.size(); i++) {
                  RuleSymbol rs = (RuleSymbol)grammar.rules.elementAt(i);
                  if ( rs.isDefined() && rs.access.equals("public") ) {
                        hasPublicRules = true;
                        break;
                  }
            }
            if (!hasPublicRules) {
                  // No public rules: emit a stub nextToken() that calls
                  // uponEOF() (translating char-stream errors to token-stream
                  // errors) and always returns an EOF token.
                  println("");
                  println("override public new Token nextToken()\t\t\t//throws TokenStreamException");
                  println("{");
                  tabs++;
                  println("try");
                  println("{");
                  tabs++;
                  println("uponEOF();");
                  tabs--;
                  println("}");
                  println("catch(CharStreamIOException csioe)");
                  println("{");
                  tabs++;
                  println("throw new TokenStreamIOException(csioe.io);");
                  tabs--;
                  println("}");
                  println("catch(CharStreamException cse)");
                  println("{");
                  tabs++;
                  println("throw new TokenStreamException(cse.Message);");
                  tabs--;
                  println("}");
                  println("return new CommonToken(Token.EOF_TYPE, \"\");");
                  tabs--;
                  println("}");
                  println("");
                  return;
            }

            // Create the synthesized nextToken() rule
            RuleBlock nextTokenBlk = MakeGrammar.createNextTokenRule(grammar, grammar.rules, "nextToken");
            // Define the nextToken rule symbol
            // ("m" prefix matches the encoded lexer-rule naming convention
            // used elsewhere in this generator, e.g. "m"+filterRule below).
            RuleSymbol nextTokenRs = new RuleSymbol("mnextToken");
            nextTokenRs.setDefined();
            nextTokenRs.setBlock(nextTokenBlk);
            nextTokenRs.access = "private";
            grammar.define(nextTokenRs);
            // Analyze the nextToken rule
            boolean ok = grammar.theLLkAnalyzer.deterministic(nextTokenBlk);
            // NOTE(review): 'ok' is never read -- presumably deterministic()
            // reports nondeterminism warnings itself; confirm before removing.

            // Generate the next token rule
            String filterRule=null;
            if ( ((LexerGrammar)grammar).filterMode ) {
                  filterRule = ((LexerGrammar)grammar).filterRule;
            }

            // Open the generated nextToken() method and its retry loop.
            // The "tryAgain" label is the target of the emitted
            // "goto tryAgain" statements used for SKIP tokens and
            // filter-mode recovery; _println() avoids indenting the label.
            println("");
            println("public new Token nextToken()\t\t\t//throws TokenStreamException");
            println("{");
            tabs++;
            println("Token theRetToken = null;");
            _println("tryAgain:");
            println("for (;;)");
            println("{");
            tabs++;
            println("Token _token = null;");
            println("int _ttype = Token.INVALID_TYPE;");
            if ( ((LexerGrammar)grammar).filterMode ) {
                  println("setCommitToPath(false);");
                  if ( filterRule!=null ) {
                        // Here's a good place to ensure that the filter rule actually exists
            if (!grammar.isDefined(CodeGenerator.encodeLexerRuleName(filterRule))) {
                  grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                        }
                        else {
                              RuleSymbol rs = (RuleSymbol)grammar.getSymbol(CodeGenerator.encodeLexerRuleName(filterRule));
                              if ( !rs.isDefined() ) {
                                    grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                              }
                              else if ( rs.access.equals("public") ) {
                                    grammar.antlrTool.error("Filter rule " + filterRule + " must be protected");
                              }
                        }
                        // Emit a stream mark so failed matches can rewind
                        // before handing input to the filter rule.
                        println("int _m;");
                        println("_m = mark();");
                  }
            }
            println("resetText();");

            println("try     // for char stream error handling");
            println("{");
            tabs++;

            // Generate try around whole thing to trap scanner errors
            println("try     // for lexical error handling");
            println("{");
            tabs++;

            // Test for public lexical rules with empty paths
            // (an optional public rule could match "" and loop forever).
            for (int i=0; i<nextTokenBlk.getAlternatives().size(); i++) {
                  Alternative a = nextTokenBlk.getAlternativeAt(i);
                  if ( a.cache[1].containsEpsilon() ) {
                        //String r = a.head.toString();
            RuleRefElement rr = (RuleRefElement)a.head;
            String r = CodeGenerator.decodeLexerRuleName(rr.targetRule);
            antlrTool.warning("public lexical rule "+r+" is optional (can match \"nothing\")");
                  }
            }

            // Generate the block
            // errFinish is the C# code emitted for the no-viable-alternative
            // case: handle EOF; then, in filter mode, either consume a char
            // and retry, or commit and invoke the filter rule; otherwise
            // throw the standard no-viable-alt exception.
            String newline = System.getProperty("line.separator");
            CSharpBlockFinishingInfo howToFinish = genCommonBlock(nextTokenBlk, false);
            String errFinish = "if (LA(1)==EOF_CHAR) { uponEOF(); returnToken_ = makeToken(Token.EOF_TYPE); }";
            errFinish += newline+"\t\t\t\t";
            if ( ((LexerGrammar)grammar).filterMode ) {
                  if ( filterRule==null ) {
                  //kunle: errFinish += "else { consume(); continue tryAgain; }";
                  errFinish += "\t\t\t\telse";
                  errFinish += "\t\t\t\t{";
                  errFinish += "\t\t\t\t\tconsume();";
                  errFinish += "\t\t\t\t\tgoto tryAgain;";
                  errFinish += "\t\t\t\t}";
                  }
                  else {
                        errFinish += "\t\t\t\t\telse"+newline+
                              "\t\t\t\t\t{"+newline+
                              "\t\t\t\t\tcommit();"+newline+
                              "\t\t\t\t\ttry {m"+filterRule+"(false);}"+newline+
                              "\t\t\t\t\tcatch(RecognitionException e)"+newline+
                              "\t\t\t\t\t{"+newline+
                              "\t\t\t\t\t // catastrophic failure"+newline+
                              "\t\t\t\t\t reportError(e);"+newline+
                              "\t\t\t\t\t consume();"+newline+
                              "\t\t\t\t\t}"+newline+
                              "\t\t\t\t\tgoto tryAgain;"+newline+
                              "\t\t\t\t}";
                  }
            }
            else {
                  errFinish += "else {"+throwNoViable+"}";
            }
            genBlockFinish(howToFinish, errFinish);

            // at this point a valid token has been matched, undo "mark" that was done
            if ( ((LexerGrammar)grammar).filterMode && filterRule!=null ) {
                  println("commit();");
            }

            // Generate literals test if desired
            // make sure _ttype is set first; note returnToken_ must be
            // non-null as the rule was required to create it.
            println("if ( null==returnToken_ ) goto tryAgain; // found SKIP token");
            println("_ttype = returnToken_.Type;");
            if ( ((LexerGrammar)grammar).getTestLiterals()) {
                  genLiteralsTest();
            }

            // return token created by rule reference in switch
            println("returnToken_.Type = _ttype;");
            println("return returnToken_;");

            // Close try block
            tabs--;
            println("}");
            println("catch (RecognitionException e) {");
            tabs++;
            // Filter-mode recovery on lexical error: if not committed to a
            // path, either consume-and-retry or rewind to the mark and run
            // the filter rule; the trailing "else" attaches the default
            // handler below to the committed case only.
            if ( ((LexerGrammar)grammar).filterMode ) {
                  if ( filterRule==null ) {
                        println("if (!getCommitToPath())");
                        println("{");
                        tabs++;
                        println("consume();");
                        println("goto tryAgain;");
                        tabs--;
                        println("}");
                  }
                  else {
                        println("if (!getCommitToPath())");
                        println("{");
                        tabs++;
                        println("rewind(_m);");
                        println("resetText();");
                        println("try {m"+filterRule+"(false);}");
                        println("catch(RecognitionException ee) {");
                        println("   // horrendous failure: error in filter rule");
                        println("   reportError(ee);");
                        println("   consume();");
                        println("}");
                        //println("goto tryAgain;");
                        tabs--;
                        println("}");
                        println("else");
                  }
            }
            // Default error handler: report-and-consume, or rethrow wrapped
            // as a TokenStreamRecognitionException for the caller.
            if ( nextTokenBlk.getDefaultErrorHandler() ) {
                  println("{");
                  tabs++;
                  println("reportError(e);");
                  println("consume();");
                  tabs--;
                  println("}");
            }
            else {
                  // pass on to invoking routine
                  tabs++;
                  println("throw new TokenStreamRecognitionException(e);");
                  tabs--;
            }
            tabs--;
            println("}");

            // close CharStreamException try
            // (I/O errors and general char-stream errors are translated to
            // the corresponding TokenStream exception types)
            tabs--;
            println("}");
            println("catch (CharStreamException cse) {");
            println("   if ( cse is CharStreamIOException ) {");
            println("         throw new TokenStreamIOException(((CharStreamIOException)cse).io);");
            println("   }");
            println("   else {");
            println("         throw new TokenStreamException(cse.Message);");
            println("   }");
            println("}");

            // close for-loop
            tabs--;
            println("}");

            // close method nextToken
            tabs--;
            println("}");
            println("");
      }


Generated by  Doxygen 1.6.0   Back to index