
void antlr::CppCodeGenerator::genNextToken (  )  [inline]

Generate the nextToken() rule. nextToken() is a synthetic lexer rule that is the implicit OR of all user-defined lexer rules.
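For orientation, the sketch below shows, in C++, roughly what the synthesized rule expands to in a generated lexer: a loop that dispatches on the lookahead character to whichever user-defined m<Rule>() method it predicts. This is an illustration only, not the verbatim output of this method; the class name MyLexer and the rules mID()/mWS() are hypothetical, and filter mode, the literals test, and the error-handling paths generated by the code below are omitted.

      // Illustrative sketch only (assumes the ANTLR 2.x C++ runtime; MyLexer,
      // mID() and mWS() are hypothetical stand-ins for a generated lexer class
      // and its user-defined rules).
      antlr::RefToken MyLexer::nextToken()
      {
          antlr::RefToken theRetToken;
          for (;;) {
              int _ttype = antlr::Token::INVALID_TYPE;
              resetText();
              // The "implicit OR": call whichever user-defined rule the
              // lookahead predicts; the rule fills in _returnToken.
              switch ( LA(1) ) {
              case 'a': /* ... other characters that predict ID ... */
                  mID(true);
                  theRetToken = _returnToken;
                  break;
              case ' ': case '\t': case '\n':
                  mWS(true);
                  theRetToken = _returnToken;
                  break;
              default:
                  if ( LA(1) == EOF_CHAR ) {
                      uponEOF();
                      _returnToken = makeToken(antlr::Token::EOF_TYPE);
                  }
                  // else: no viable alternative -- error handling as generated below
              }
              if ( !_returnToken )
                  continue;                      // SKIP token: scan again
              _ttype = _returnToken->getType();
              _returnToken->setType(_ttype);
              return _returnToken;
          }
      }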


Definition at line 2962 of file CppCodeGenerator.java.

References _println(), antlr::Grammar::antlrTool, antlr::Alternative::cache, antlr::Lookahead::containsEpsilon(), genBlockFinish(), genCommonBlock(), antlr::AlternativeBlock::getAlternativeAt(), antlr::AlternativeBlock::getAlternatives(), antlr::Grammar::getClassName(), antlr::RuleBlock::getDefaultErrorHandler(), antlr::Grammar::getSymbol(), antlr::Grammar::isDefined(), and println().

      {
            // Are there any public rules?  If not, then just generate a
            // fake nextToken().
            boolean hasPublicRules = false;
            for (int i = 0; i < grammar.rules.size(); i++) {
                  RuleSymbol rs = (RuleSymbol)grammar.rules.elementAt(i);
                  if ( rs.isDefined() && rs.access.equals("public") ) {
                        hasPublicRules = true;
                        break;
                  }
            }
            if (!hasPublicRules) {
                  println("");
                  println(namespaceAntlr+"RefToken "+grammar.getClassName()+"::nextToken() { return "+namespaceAntlr+"RefToken(new "+namespaceAntlr+"CommonToken("+namespaceAntlr+"Token::EOF_TYPE, \"\")); }");
                  println("");
                  return;
            }

            // Create the synthesized nextToken() rule
            RuleBlock nextTokenBlk = MakeGrammar.createNextTokenRule(grammar, grammar.rules, "nextToken");
            // Define the nextToken rule symbol
            RuleSymbol nextTokenRs = new RuleSymbol("mnextToken");
            nextTokenRs.setDefined();
            nextTokenRs.setBlock(nextTokenBlk);
            nextTokenRs.access = "private";
            grammar.define(nextTokenRs);
            // Analyze the nextToken rule
            boolean ok = grammar.theLLkAnalyzer.deterministic(nextTokenBlk);

            // Generate the next token rule
            String filterRule=null;
            if ( ((LexerGrammar)grammar).filterMode ) {
                  filterRule = ((LexerGrammar)grammar).filterRule;
            }

            println("");
            println(namespaceAntlr+"RefToken "+grammar.getClassName()+"::nextToken()");
            println("{");
            tabs++;
            println(namespaceAntlr+"RefToken theRetToken;");
            println("for (;;) {");
            tabs++;
            println(namespaceAntlr+"RefToken theRetToken;");
            println("int _ttype = "+namespaceAntlr+"Token::INVALID_TYPE;");
            if ( ((LexerGrammar)grammar).filterMode ) {
                  println("setCommitToPath(false);");
                  if ( filterRule!=null ) {
                        // Here's a good place to ensure that the filter rule actually exists
                        if ( !grammar.isDefined(CodeGenerator.encodeLexerRuleName(filterRule)) ) {
                              grammar.antlrTool.error("Filter rule "+filterRule+" does not exist in this lexer");
                        }
                        else {
                              RuleSymbol rs = (RuleSymbol)grammar.getSymbol(CodeGenerator.encodeLexerRuleName(filterRule));
                              if ( !rs.isDefined() ) {
                                    grammar.antlrTool.error("Filter rule "+filterRule+" does not exist in this lexer");
                              }
                              else if ( rs.access.equals("public") ) {
                                    grammar.antlrTool.error("Filter rule "+filterRule+" must be protected");
                              }
                        }
                        println("int _m;");
                        println("_m = mark();");
                  }
            }
            println("resetText();");

            // Generate try around whole thing to trap scanner errors
            println("try {   // for lexical and char stream error handling");
            tabs++;

            // Test for public lexical rules with empty paths
            for (int i=0; i<nextTokenBlk.getAlternatives().size(); i++) {
                  Alternative a = nextTokenBlk.getAlternativeAt(i);
                  if ( a.cache[1].containsEpsilon() ) {
                        antlrTool.warning("found optional path in nextToken()");
                  }
            }

            // Generate the block
            String newline = System.getProperty("line.separator");
            CppBlockFinishingInfo howToFinish = genCommonBlock(nextTokenBlk, false);
            String errFinish = "if (LA(1)==EOF_CHAR)"+newline+
                  "\t\t\t\t{"+newline+"\t\t\t\t\tuponEOF();"+newline+
                  "\t\t\t\t\t_returnToken = makeToken("+namespaceAntlr+"Token::EOF_TYPE);"+
                  newline+"\t\t\t\t}";
            errFinish += newline+"\t\t\t\t";
            if ( ((LexerGrammar)grammar).filterMode ) {
                  if ( filterRule==null ) {
                        errFinish += "else {consume(); goto tryAgain;}";
                  }
                  else {
                        errFinish += "else {"+newline+
                                    "\t\t\t\t\tcommit();"+newline+
                                    "\t\t\t\t\ttry {m"+filterRule+"(false);}"+newline+
                                    "\t\t\t\t\tcatch("+namespaceAntlr+"RecognitionException& e) {"+newline+
                                    "\t\t\t\t\t // catastrophic failure"+newline+
                                    "\t\t\t\t\t reportError(e);"+newline+
                                    "\t\t\t\t\t consume();"+newline+
                                    "\t\t\t\t\t}"+newline+
                                    "\t\t\t\t\tgoto tryAgain;"+newline+
                                    "\t\t\t\t}";
                  }
            }
            else {
                  errFinish += "else {"+throwNoViable+"}";
            }
            genBlockFinish(howToFinish, errFinish);

            // at this point a valid token has been matched, undo "mark" that was done
            if ( ((LexerGrammar)grammar).filterMode && filterRule!=null ) {
                  println("commit();");
            }

            // Generate literals test if desired
            // make sure _ttype is set first; note _returnToken must be
            // non-null as the rule was required to create it.
            println("if ( !_returnToken )"+newline+
                          "\t\t\t\tgoto tryAgain; // found SKIP token"+newline);
            println("_ttype = _returnToken->getType();");
            if ( ((LexerGrammar)grammar).getTestLiterals()) {
                  genLiteralsTest();
            }

            // return token created by rule reference in switch
            println("_returnToken->setType(_ttype);");
            println("return _returnToken;");

            // Close try block
            tabs--;
            println("}");
            println("catch ("+namespaceAntlr+"RecognitionException& e) {");
            tabs++;
            if ( ((LexerGrammar)grammar).filterMode ) {
                  if ( filterRule==null ) {
                        println("if ( !getCommitToPath() ) {");
                        tabs++;
                        println("consume();");
                        println("goto tryAgain;");
                        tabs--;
                        println("}");
                  }
                  else {
                        println("if ( !getCommitToPath() ) {");
                        tabs++;
                        println("rewind(_m);");
                        println("resetText();");
                        println("try {m"+filterRule+"(false);}");
                        println("catch("+namespaceAntlr+"RecognitionException& ee) {");
                        println("   // horrendous failure: error in filter rule");
                        println("   reportError(ee);");
                        println("   consume();");
                        println("}");
                        // println("goto tryAgain;");
                        tabs--;
                        println("}");
                        println("else");
                  }
            }
            if ( nextTokenBlk.getDefaultErrorHandler() ) {
                  println("{");
                  tabs++;
                  println("reportError(e);");
                  println("consume();");
                  tabs--;
                  println("}");
            }
            else {
                  // pass on to invoking routine
                  tabs++;
                  println("throw "+namespaceAntlr+"TokenStreamRecognitionException(e);");
                  tabs--;
            }

            // close CharStreamException try
            tabs--;
            println("}");
            println("catch ("+namespaceAntlr+"CharStreamIOException& csie) {");
            println("\tthrow "+namespaceAntlr+"TokenStreamIOException(csie.io);");
            println("}");
            println("catch ("+namespaceAntlr+"CharStreamException& cse) {");
            println("\tthrow "+namespaceAntlr+"TokenStreamException(cse.getMessage());");
            println("}");

            // close for-loop
            _println("tryAgain:;");
            tabs--;
            println("}");

            // close method nextToken
            tabs--;
            println("}");
            println("");
      }
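
As a concrete illustration of the method's first branch (the "no public rules" case), the emitted stub simply returns an EOF token on every call. Rendered as C++, with the hypothetical class name MyLexer standing in for grammar.getClassName() and antlr:: for the namespaceAntlr prefix, it looks like this:

      // Degenerate nextToken() emitted when the lexer defines no public rules.
      antlr::RefToken MyLexer::nextToken() {
          return antlr::RefToken(new antlr::CommonToken(antlr::Token::EOF_TYPE, ""));
      }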

