
void antlr::JavaCodeGenerator::genNextToken() [inline]

Generate the nextToken() rule. nextToken() is a synthetic lexer rule that is the implicit OR of all public, user-defined lexer rules; the generated method loops, dispatching to those rules until one of them produces a returnable (non-SKIP) token or end of input is reached (see the sketch of the emitted method below).

Definition at line 2166 of file JavaCodeGenerator.java.

References antlr::CodeGenerator::_println(), antlr::Grammar::antlrTool, antlr::Alternative::cache, antlr::Lookahead::containsEpsilon(), antlr::Grammar::define(), genBlockFinish(), genCommonBlock(), antlr::AlternativeBlock::getAlternativeAt(), antlr::AlternativeBlock::getAlternatives(), antlr::RuleBlock::getDefaultErrorHandler(), antlr::Grammar::getSymbol(), antlr::CodeGenerator::grammar, antlr::Alternative::head, antlr::Grammar::isDefined(), antlr::CodeGenerator::println(), antlr::Grammar::rules, antlr::CodeGenerator::tabs, and antlr::Grammar::theLLkAnalyzer.

Referenced by gen().
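Assembled from the println() calls in the body below, the emitted method comes out roughly like the following sketch. The lexer name and its rules (ID, WS) are hypothetical, filter mode is assumed off, and the default error handler is assumed off for the synthesized block; the switch dispatch stands in for whatever genCommonBlock() actually produces for the grammar, and the no-viable-alternative throw shows the typical expansion of throwNoViable, which is not part of this listing. The method lands inside the generated lexer class, which extends antlr.CharScanner, so _returnToken, EOF_CHAR, LA(), makeToken(), uponEOF(), resetText(), reportError(), and consume() come from that surrounding class and its base.

    // Sketch only: approximate shape of the generated nextToken() for a
    // hypothetical lexer with public rules ID and WS, filter mode off.
    public Token nextToken() throws TokenStreamException {
        Token theRetToken=null;
    tryAgain:
        for (;;) {
            Token _token = null;
            int _ttype = Token.INVALID_TYPE;
            resetText();
            try {   // for char stream error handling
                try {   // for lexical error handling
                    // dispatch produced by genCommonBlock() (hypothetical cases):
                    switch ( LA(1)) {
                    case 'a': case 'b': /* ... */ {
                        mID(true);              // hypothetical user rule
                        theRetToken=_returnToken;
                        break;
                    }
                    case ' ': case '\t': {
                        mWS(true);              // hypothetical user rule
                        theRetToken=_returnToken;
                        break;
                    }
                    default:
                        if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}
                        // typical expansion of throwNoViable:
                        else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
                    }
                    if ( _returnToken==null ) continue tryAgain; // found SKIP token
                    _ttype = _returnToken.getType();
                    _returnToken.setType(_ttype);
                    return _returnToken;
                }
                catch (RecognitionException e) {
                    throw new TokenStreamRecognitionException(e);
                }
            }
            catch (CharStreamException cse) {
                if ( cse instanceof CharStreamIOException ) {
                    throw new TokenStreamIOException(((CharStreamIOException)cse).io);
                }
                else {
                    throw new TokenStreamException(cse.getMessage());
                }
            }
        }
    }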

    {
        // Are there any public rules?  If not, then just generate a
        // fake nextToken().
        boolean hasPublicRules = false;
        for (int i = 0; i < grammar.rules.size(); i++) {
            RuleSymbol rs = (RuleSymbol)grammar.rules.elementAt(i);
            if (rs.isDefined() && rs.access.equals("public")) {
                hasPublicRules = true;
                break;
            }
        }
        if (!hasPublicRules) {
            println("");
            println("public Token nextToken() throws TokenStreamException {");
            println("\ttry {uponEOF();}");
            println("\tcatch(CharStreamIOException csioe) {");
            println("\t\tthrow new TokenStreamIOException(csioe.io);");
            println("\t}");
            println("\tcatch(CharStreamException cse) {");
            println("\t\tthrow new TokenStreamException(cse.getMessage());");
            println("\t}");
            println("\treturn new CommonToken(Token.EOF_TYPE, \"\");");
            println("}");
            println("");
            return;
        }

        // Create the synthesized nextToken() rule
        RuleBlock nextTokenBlk = MakeGrammar.createNextTokenRule(grammar, grammar.rules, "nextToken");
        // Define the nextToken rule symbol
        RuleSymbol nextTokenRs = new RuleSymbol("mnextToken");
        nextTokenRs.setDefined();
        nextTokenRs.setBlock(nextTokenBlk);
        nextTokenRs.access = "private";
        grammar.define(nextTokenRs);
        // Analyze the nextToken rule
        boolean ok = grammar.theLLkAnalyzer.deterministic(nextTokenBlk);

        // Generate the next token rule
        String filterRule = null;
        if (((LexerGrammar)grammar).filterMode) {
            filterRule = ((LexerGrammar)grammar).filterRule;
        }

        println("");
        println("public Token nextToken() throws TokenStreamException {");
        tabs++;
        println("Token theRetToken=null;");
        _println("tryAgain:");
        println("for (;;) {");
        tabs++;
        println("Token _token = null;");
        println("int _ttype = Token.INVALID_TYPE;");
        if (((LexerGrammar)grammar).filterMode) {
            println("setCommitToPath(false);");
            if (filterRule != null) {
                // Here's a good place to ensure that the filter rule actually exists
                if (!grammar.isDefined(CodeGenerator.encodeLexerRuleName(filterRule))) {
                    grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                }
                else {
                    RuleSymbol rs = (RuleSymbol)grammar.getSymbol(CodeGenerator.encodeLexerRuleName(filterRule));
                    if (!rs.isDefined()) {
                        grammar.antlrTool.error("Filter rule " + filterRule + " does not exist in this lexer");
                    }
                    else if (rs.access.equals("public")) {
                        grammar.antlrTool.error("Filter rule " + filterRule + " must be protected");
                    }
                }
                println("int _m;");
                println("_m = mark();");
            }
        }
        println("resetText();");

        println("try {   // for char stream error handling");
        tabs++;

        // Generate try around whole thing to trap scanner errors
        println("try {   // for lexical error handling");
        tabs++;

        // Test for public lexical rules with empty paths
        for (int i = 0; i < nextTokenBlk.getAlternatives().size(); i++) {
            Alternative a = nextTokenBlk.getAlternativeAt(i);
            if (a.cache[1].containsEpsilon()) {
                //String r = a.head.toString();
                RuleRefElement rr = (RuleRefElement)a.head;
                String r = CodeGenerator.decodeLexerRuleName(rr.targetRule);
                antlrTool.warning("public lexical rule "+r+" is optional (can match \"nothing\")");
            }
        }

        // Generate the block
        String newline = System.getProperty("line.separator");
        JavaBlockFinishingInfo howToFinish = genCommonBlock(nextTokenBlk, false);
        String errFinish = "if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}";
        errFinish += newline + "\t\t\t\t";
        if (((LexerGrammar)grammar).filterMode) {
            if (filterRule == null) {
                errFinish += "else {consume(); continue tryAgain;}";
            }
            else {
                errFinish += "else {" + newline +
                    "\t\t\t\t\tcommit();" + newline +
                    "\t\t\t\t\ttry {m" + filterRule + "(false);}" + newline +
                    "\t\t\t\t\tcatch(RecognitionException e) {" + newline +
                    "\t\t\t\t\t     // catastrophic failure" + newline +
                    "\t\t\t\t\t     reportError(e);" + newline +
                    "\t\t\t\t\t     consume();" + newline +
                    "\t\t\t\t\t}" + newline +
                    "\t\t\t\t\tcontinue tryAgain;" + newline +
                    "\t\t\t\t}";
            }
        }
        else {
            errFinish += "else {" + throwNoViable + "}";
        }
        genBlockFinish(howToFinish, errFinish);

        // at this point a valid token has been matched, undo "mark" that was done
        if (((LexerGrammar)grammar).filterMode && filterRule != null) {
            println("commit();");
        }

        // Generate literals test if desired
        // make sure _ttype is set first; note _returnToken must be
        // non-null as the rule was required to create it.
        println("if ( _returnToken==null ) continue tryAgain; // found SKIP token");
        println("_ttype = _returnToken.getType();");
        if (((LexerGrammar)grammar).getTestLiterals()) {
            genLiteralsTest();
        }

        // return token created by rule reference in switch
        println("_returnToken.setType(_ttype);");
        println("return _returnToken;");

        // Close try block
        tabs--;
        println("}");
        println("catch (RecognitionException e) {");
        tabs++;
        if (((LexerGrammar)grammar).filterMode) {
            if (filterRule == null) {
                println("if ( !getCommitToPath() ) {consume(); continue tryAgain;}");
            }
            else {
                println("if ( !getCommitToPath() ) {");
                tabs++;
                println("rewind(_m);");
                println("resetText();");
                println("try {m" + filterRule + "(false);}");
                println("catch(RecognitionException ee) {");
                println("     // horrendous failure: error in filter rule");
                println("     reportError(ee);");
                println("     consume();");
                println("}");
                println("continue tryAgain;");
                tabs--;
                println("}");
            }
        }
        if (nextTokenBlk.getDefaultErrorHandler()) {
            println("reportError(e);");
            println("consume();");
        }
        else {
            // pass on to invoking routine
            println("throw new TokenStreamRecognitionException(e);");
        }
        tabs--;
        println("}");

        // close CharStreamException try
        tabs--;
        println("}");
        println("catch (CharStreamException cse) {");
        println(" if ( cse instanceof CharStreamIOException ) {");
        println("       throw new TokenStreamIOException(((CharStreamIOException)cse).io);");
        println(" }");
        println(" else {");
        println("       throw new TokenStreamException(cse.getMessage());");
        println(" }");
        println("}");

        // close for-loop
        tabs--;
        println("}");

        // close method nextToken
        tabs--;
        println("}");
        println("");
    }
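When filterMode is on and a filter rule is set (say a hypothetical protected rule named IGNORE), three extra pieces are woven into the same skeleton: the top of the loop calls mark() and setCommitToPath(false), a successful match is confirmed with commit(), and both the no-viable-alternative branch and the RecognitionException handler roll back and hand the input to the filter rule before continuing the tryAgain loop. Assembled from the println() calls above, that handler (a fragment of the generated loop body, not standalone code) comes out roughly as:

    catch (RecognitionException e) {
        if ( !getCommitToPath() ) {
            rewind(_m);
            resetText();
            try {mIGNORE(false);}          // hypothetical filter rule
            catch(RecognitionException ee) {
                 // horrendous failure: error in filter rule
                 reportError(ee);
                 consume();
            }
            continue tryAgain;
        }
        reportError(e);   // or: throw new TokenStreamRecognitionException(e);
        consume();        // depending on getDefaultErrorHandler()
    }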

