Hi,
 
>This will give you (+title:i +title:love +title:lucene)^2 (+author:i +author:love +author:lucene) \
>(+content:i +content:love +content:lucene)
This is not the same thing as
(title:i^2 author:i content:i) +(title:love^2 author:love content:love) +(title:lucene^2 author:lucene content:lucene)
because in the first form all of the terms must occur in a single field, while in the second form only one term per group is necessary.
 
David Spencer's approach is good, but we also want to support the full Lucene query syntax, such as phrase queries, prefix queries, boolean queries, etc.
So, to support all of the Lucene syntax, we have to adapt the parser.
 
See my fulltextParser code below.


 
I made a parser:
package org.apache.lucene.queryParser;


/**
 * <p>Title: </p>
 * <p>Description: </p>
 * <p>Copyright: Copyright (c) 2003</p>
 * <p>Company: </p>
 * @author Maisonneuve Nicolas
 * @version 1.0
 */
import java.io.IOException;
import java.io.StringReader;
import java.util.Vector;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.CharStream;
import org.apache.lucene.queryParser.FastCharStream;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParserConstants;
import org.apache.lucene.queryParser.QueryParserTokenManager;
import org.apache.lucene.queryParser.Token;
import org.apache.lucene.queryParser.TokenMgrError;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;


public class fulltextParser
implements QueryParserConstants {

    // Conjunction codes returned by Conjunction(): no conjunction, AND, OR.
    private static final int CONJ_NONE=0;

    private static final int CONJ_AND=1;

    private static final int CONJ_OR=2;

    // Modifier codes returned by Modifiers(): none, NOT/'-', '+'.
    private static final int MOD_NONE=0;

    private static final int MOD_NOT=10;

    private static final int MOD_REQ=11;

    // Default-operator modes for terms that carry no explicit conjunction.
    public static final int DEFAULT_OPERATOR_OR=0;

    public static final int DEFAULT_OPERATOR_AND=1;



    /** The actual operator that parser uses to combine query terms */
    private int operator=DEFAULT_OPERATOR_AND;



    /**
     * Whether terms of wildcard and prefix queries are to be automatically
     * lower-cased or not.  Default is <code>true</code>.
     */
    boolean lowercaseWildcardTerms=true;

    // Analyzer used to tokenize plain (non-wildcard) term text.
    Analyzer analyzer;

    // Default field name; set to fields[0] by the constructor.
    String field;

    // All fields each clause is expanded across.
    String[] fields;

    // Optional per-field boosts, parallel to 'fields'; may be null.
    Float[] boosts;

    // Default slop applied to phrase queries built from multi-token text.
    int phraseSlop=0;



    /** Parses a query string, returning a {@link org.apache.lucene.search.Query}.
     *  @param query	the query string to be parsed.
     *  @param fields	the fields every clause is expanded across.
     *  @param analyzer   used to find terms in the query text.
     *  @throws ParseException if the parsing fails
     */
    static public Query parse (String query, String fields[], Analyzer analyzer) throws ParseException {
        try {
            fulltextParser parser=new fulltextParser(fields, analyzer);
            return parser.parse(query);
        }
        catch(TokenMgrError tme) {
            // Surface lexer errors as the checked ParseException callers expect.
            throw new ParseException(tme.getMessage());
        }
    }

   /** Parses a query string, returning a {@link org.apache.lucene.search.Query}.
     *  @param query	the query string to be parsed.
     *  @param fields	the fields every clause is expanded across.
	 *  @param boost	the boost of each field in the fields parameter
     *  @param analyzer   used to find terms in the query text.
     *  @throws ParseException if the parsing fails
     */
    static public Query parse (String query, String fields[], Float boost[], Analyzer analyzer) throws ParseException {
        try {
            fulltextParser parser=new fulltextParser(fields, boost, analyzer);
            return parser.parse(query);
        }
        catch(TokenMgrError tme) {
            // Surface lexer errors as the checked ParseException callers expect.
            throw new ParseException(tme.getMessage());
        }
    }



    /** Constructs a query parser with no per-field boosts.
     *  @param fields	the fields every clause is expanded across.
     *  @param a	used to find terms in the query text.
     */
    public fulltextParser (String[] fields, Analyzer a) {
        this(fields, null, a);
    }


    /** Constructs a query parser.
     *  @param fields	the fields every clause is expanded across.
     *  @param boosts	per-field boosts parallel to <code>fields</code>; may be null.
     *  @param a	used to find terms in the query text.
     */
    public fulltextParser (String[] fields, Float boosts[], Analyzer a) {
        // Bootstrap the generated parser machinery with an empty stream;
        // parse() will ReInit with the real query text.
        this(new FastCharStream(new StringReader("")));
        analyzer=a;
        this.fields=fields;
        this.boosts=boosts;
        // NOTE(review): assumes 'fields' is non-empty -- fields[0] would throw
        // ArrayIndexOutOfBoundsException otherwise; confirm with callers.
        field=fields[0];
    }



    /** Parses a query string, returning a
     * <a href="lucene.search.Query.html">Query</a>.
     *  @param query	the query string to be parsed.
     *  @throws ParseException if the parsing fails
     *  @throws TokenMgrError if the tokenizing fails
     */
    public Query parse (String query) throws ParseException, TokenMgrError {
        // Reset the generated token machinery on the new input, then run the
        // top-level Query production against the default field.
        ReInit(new FastCharStream(new StringReader(query)));
        return Query(field);
    }



    /**
     * Sets the default slop for phrases.  If zero, then exact phrase matches
     * are required.  Default value is zero.
     */
    public void setPhraseSlop (int phraseSlop) {
        this.phraseSlop=phraseSlop;
    }



    /**
     * Gets the default slop for phrases.
     */
    public int getPhraseSlop () {
        return phraseSlop;
    }



    /**
     * Sets the boolean operator of the QueryParser.
     * In classic mode (<code>DEFAULT_OPERATOR_OR</code>) terms without any modifiers
     * are considered optional: for example <code>capital of Hungary</code> is equal to
     * <code>capital OR of OR Hungary</code>.<br/>
     * In <code>DEFAULT_OPERATOR_AND</code> terms are considered to be in conjunction: the
     * above mentioned query is parsed as <code>capital AND of AND Hungary</code>
     */
    public void setOperator (int operator) {
        this.operator=operator;
    }


    /** Gets the current default boolean operator (one of DEFAULT_OPERATOR_*). */
    public int getOperator () {
        return operator;
    }


    /** Sets whether wildcard/prefix terms are automatically lower-cased. */
    public void setLowercaseWildcardTerms (boolean lowercaseWildcardTerms) {
        this.lowercaseWildcardTerms=lowercaseWildcardTerms;
    }


    /** Gets whether wildcard/prefix terms are automatically lower-cased. */
    public boolean getLowercaseWildcardTerms () {
        return lowercaseWildcardTerms;
    }


    /** Adds a BooleanClause built from <code>q</code> to <code>clauses</code>,
     *  deriving the required/prohibited flags from the conjunction (CONJ_*),
     *  the modifier (MOD_*) and the parser's default operator.  May also
     *  retro-adjust the flags of the preceding clause.  A null query (all
     *  tokens filtered away) is skipped after that adjustment. */
    protected void addClause (Vector clauses, int conj, int mods, Query q) {
        boolean required, prohibited;
        // If this term is introduced by AND, make the preceding term required,
        // unless it's already prohibited
        if(conj==CONJ_AND) {
            BooleanClause c=(BooleanClause) clauses.elementAt(clauses.size()-1);
            if(!c.prohibited) {
                c.required=true;
            }
        }
        if(operator==DEFAULT_OPERATOR_AND&&conj==CONJ_OR) {
            // If this term is introduced by OR, make the preceding term optional,
            // unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b)
            // notice if the input is a OR b, first term is parsed as required; without
            // this modification a OR b would parsed as +a OR b
            BooleanClause c=(BooleanClause) clauses.elementAt(clauses.size()-1);
            if(!c.prohibited) {
                c.required=false;
            }
        }
        // We might have been passed a null query; the term might have been
        // filtered away by the analyzer.
        if(q==null) {
            return;
        }
        if(operator==DEFAULT_OPERATOR_OR) {
            // We set REQUIRED if we're introduced by AND or +; PROHIBITED if
            // introduced by NOT or -; make sure not to set both.
            prohibited=(mods==MOD_NOT);
            required=(mods==MOD_REQ);
            if(conj==CONJ_AND&&!prohibited) {
                required=true;
            }
        }
        else {
            // We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED
            // if not PROHIBITED and not introduced by OR
            prohibited=(mods==MOD_NOT);
            required=(!prohibited&&conj!=CONJ_OR);
        }
        clauses.addElement(new BooleanClause(q, required, prohibited));
    }


    /** Analyzes <code>queryText</code> and builds, per configured field, a
     *  TermQuery (single token) or a PhraseQuery with the default slop
     *  (multiple tokens), OR-ing them all inside one BooleanQuery.  Per-field
     *  boosts are applied when configured.  Zero surviving tokens yield an
     *  empty BooleanQuery. */
    protected Query getFieldQuery (String[] fields, Analyzer analyzer, String queryText) {
        // Run the analyzer over the text and collect the surviving term texts.
        Vector terms=new Vector();
        TokenStream stream=analyzer.tokenStream("", new StringReader(queryText));
        org.apache.lucene.analysis.Token tok=null;
        do {
            try {
                tok=stream.next();
            }
            catch(IOException e) {
                tok=null;
            }
            if(tok!=null) {
                terms.addElement(tok.termText());
            }
        }
        while(tok!=null);

        BooleanQuery result=new BooleanQuery();
        if(terms.size()==1) {
            // Single token: one TermQuery per field.
            String text=(String) terms.elementAt(0);
            for(int i=0; i<fields.length; i++) {
                Query tq=new TermQuery(new Term(fields[i], text));
                if(boosts!=null) {
                    tq.setBoost(boosts[i].floatValue());
                }
                result.add(tq, false, false);
            }
        }
        else if(terms.size()>1) {
            // Several tokens: one PhraseQuery per field.
            for(int i=0; i<fields.length; i++) {
                PhraseQuery pq=new PhraseQuery();
                pq.setSlop(phraseSlop);
                for(int j=0; j<terms.size(); j++) {
                    pq.add(new Term(fields[i], (String) terms.elementAt(j)));
                }
                if(boosts!=null) {
                    pq.setBoost(boosts[i].floatValue());
                }
                result.add(pq, false, false);
            }
        }
        // No tokens (e.g. everything stop-worded away): empty BooleanQuery.
        return result;
    }



    /**
     * Factory method for generating a query, given a set of clauses.
     * By default creates a boolean query composed of the clauses passed in.
     *
     * Can be overridden by extending classes, to modify the query being
     * returned.
     *
     * @param clauses Vector that contains {@link BooleanClause} instances to join.
     *
     * @return Resulting {@link Query} object.
     */
    protected Query getBooleanQuery (Vector clauses) {
        BooleanQuery result=new BooleanQuery();
        int count=clauses.size();
        for(int idx=0; idx<count; idx++) {
            result.add((BooleanClause) clauses.elementAt(idx));
        }
        return result;
    }



    /**
     * Factory method for generating a query. Called when the parser meets a
     * term token containing one or more wildcard characters (? and *) that is
     * not a plain prefix term (a single trailing *).
     *<p>
     * Depending on settings, the term may be lower-cased automatically.
     * It does not go through the Analyzer, since normal Analyzers are
     * unlikely to work properly with wildcard templates.
     *<p>
     * Can be overridden by extending classes to provide custom handling for
     * wildcard queries, which may be necessary due to the missing analyzer call.
     *
     * @param fields Names of the fields the query is expanded across.
     * @param termStr Term token that contains one or more wildcard
     *   characters (? or *), but is not a simple prefix term
     *
     * @return Resulting {@link Query}: one WildcardQuery per field, OR-ed
     *   together inside a BooleanQuery, with per-field boosts applied
     */

    protected Query getWildcardQuery (String fields[], String termStr) {
        String pattern=lowercaseWildcardTerms?termStr.toLowerCase():termStr;
        BooleanQuery result=new BooleanQuery();
        for(int i=0; i<fields.length; i++) {
            Query wq=new WildcardQuery(new Term(fields[i], pattern));
            if(boosts!=null) {
                wq.setBoost(boosts[i].floatValue());
            }
            result.add(wq, false, false);
        }
        return result;
    }



    /**
     * Factory method for generating a query (similar to
     * {@link #getWildcardQuery}). Called when the parser parses an input term
     * token that uses prefix notation; that is, contains a single '*' wildcard
     * character as its last character. Since this is a special case
     * of a generic wildcard term, and such a query can be optimized easily,
     * this usually results in a different query object.
     *<p>
     * Depending on settings, a prefix term may be lower-cased
     * automatically. It will not go through the default Analyzer,
     * however, since normal Analyzers are unlikely to work properly
     * with wildcard templates.
     *<p>
     * Can be overridden by extending classes, to provide custom handling for
     * wild card queries, which may be necessary due to missing analyzer calls.
     *
     * @param fields Names of the fields the query is expanded across.
     * @param termStr Term token to use for building term for the query
     *    (<b>without</b> trailing '*' character!)
     *
     * @return Resulting {@link Query}: one PrefixQuery per field, OR-ed
     *    together inside a BooleanQuery, with per-field boosts applied
     */

    protected Query getPrefixQuery (String fields[], String termStr) {
        if(lowercaseWildcardTerms) {
            termStr=termStr.toLowerCase();
        }
        BooleanQuery bQuery=new BooleanQuery();
        for(int i=0; i<fields.length; i++) {
            // BUG FIX: use fields[i] instead of the default 'field' member, so
            // the prefix query is expanded across every configured field --
            // consistent with getWildcardQuery and getFuzzyQuery. Previously
            // every iteration targeted the same (default) field.
            Term t=new Term(fields[i], termStr);
            Query q=new PrefixQuery(t);
            if(boosts!=null) {
                q.setBoost(boosts[i].floatValue());
            }
            bQuery.add(q, false, false);
        }
        return bQuery;
    }



    /**
     * Factory method for generating a query (similar to
     * {@link #getWildcardQuery}). Called when the parser parses
     * an input term token that has the fuzzy suffix (~) appended.
     *
     * @param fields Names of the fields the query is expanded across.
     * @param termStr Term token to use for building term for the query
     *
     * @return Resulting {@link Query}: one FuzzyQuery per field, OR-ed
     *   together inside a BooleanQuery, with per-field boosts applied
     */

    protected Query getFuzzyQuery (String fields[], String termStr) {
        BooleanQuery result=new BooleanQuery();
        for(int i=0; i<fields.length; i++) {
            Query fq=new FuzzyQuery(new Term(fields[i], termStr));
            if(boosts!=null) {
                fq.setBoost(boosts[i].floatValue());
            }
            result.add(fq, false, false);
        }
        return result;
    }


    /** Tiny manual smoke test: parses a quoted phrase over two boosted fields
     *  with a SimpleAnalyzer and prints the resulting query. */
    public static void main (String[] args) throws Exception {
        fulltextParser qp=new fulltextParser(new String[] {
        "field1", "field2"}
        , new Float[] {
        new Float(1f), new Float(2f)}
        , new org.apache.lucene.analysis.SimpleAnalyzer());
        Query q=qp.parse("\"why under\"");
        System.out.println(q.toString());
        /*    qp.ReInit(new FastCharStream(new StringReader("coucou nico +salut")));
            System.out.println(qp.Clause("field").toString());
            System.out.println(qp.Clause("field2").toString());
         */
    }



// *   Query  ::= ( Clause )*
// *   Clause ::= ["+", "-"] [<TERM> ":"] ( <TERM> | "(" Query ")" )
    /** Generated production: consumes an optional AND/OR token and returns
     *  the matching CONJ_* code (CONJ_NONE when no conjunction is present). */
    final public int Conjunction () throws ParseException {
        int ret=CONJ_NONE;
        switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case AND:
            case OR:
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case AND:
                        jj_consume_token(AND);
                        ret=CONJ_AND;
                        break;
                    case OR:
                        jj_consume_token(OR);
                        ret=CONJ_OR;
                        break;
                    default:
                        jj_la1[0]=jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[1]=jj_gen; ;
        }
        {
            if(true) {
                return ret;
            }
        }
        // Unreachable; emitted by the JavaCC generator to satisfy the compiler.
        throw new Error("Missing return statement in function");
    }


    /** Generated production: consumes an optional '+', '-' or NOT token and
     *  returns the matching MOD_* code (MOD_NONE when absent). */
    final public int Modifiers () throws ParseException {
        int ret=MOD_NONE;
        switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case NOT:
            case PLUS:
            case MINUS:
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case PLUS:
                        jj_consume_token(PLUS);
                        ret=MOD_REQ;
                        break;
                    case MINUS:
                        jj_consume_token(MINUS);
                        ret=MOD_NOT;
                        break;
                    case NOT:
                        jj_consume_token(NOT);
                        ret=MOD_NOT;
                        break;
                    default:
                        jj_la1[2]=jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[3]=jj_gen; ;
        }
        {
            if(true) {
                return ret;
            }
        }
        // Unreachable; emitted by the JavaCC generator to satisfy the compiler.
        throw new Error("Missing return statement in function");
    }


    /** Top-level generated production: parses a sequence of Clauses joined by
     *  optional conjunctions/modifiers, folding them into a BooleanQuery via
     *  addClause().  A lone unmodified clause is returned directly. */
    final public Query Query (String field) throws ParseException {
        Vector clauses=new Vector();
        Query q, firstQuery=null;
        int conj, mods;
        mods=Modifiers();
        q=Clause(field);
        addClause(clauses, CONJ_NONE, mods, q);
        if(mods==MOD_NONE) {
            firstQuery=q;
        }
        // Loop while the next token can start another clause.
        label_1:while(true) {
            switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                case AND:
                case OR:
                case NOT:
                case PLUS:
                case MINUS:
                case LPAREN:
                case QUOTED:
                case TERM:
                case PREFIXTERM:
                case WILDTERM:
                case RANGEIN_START:
                case RANGEEX_START:
                case NUMBER:
                    ;
                    break;
                default:
                    jj_la1[4]=jj_gen;
                    break label_1;
            }
            conj=Conjunction();
            mods=Modifiers();
            q=Clause(field);
            addClause(clauses, conj, mods, q);
        }
        // A single clause with no modifier is returned without a wrapper.
        if(clauses.size()==1&&firstQuery!=null) {
            if(true) {
                return firstQuery;
            }
        }
        else {
            {
                if(true) {
                    return getBooleanQuery(clauses);
                }
            }
        }
        // Unreachable; emitted by the JavaCC generator to satisfy the compiler.
        throw new Error("Missing return statement in function");
    }


    /** Generated production for a single clause: an optional "field:" prefix
     *  (detected via 2-token lookahead), then either a Term or a
     *  parenthesized sub-Query with an optional ^boost. */
    final public Query Clause (String field) throws ParseException {
        Query q;
        Token fieldToken=null, boost=null;
        if(jj_2_1(2)) {
            // Explicit "field:" prefix overrides the default field for this clause.
            fieldToken=jj_consume_token(TERM);
            jj_consume_token(COLON);
            field=fieldToken.image;
        }
        else {
            ;
        }
        switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case QUOTED:
            case TERM:
            case PREFIXTERM:
            case WILDTERM:
            case RANGEIN_START:
            case RANGEEX_START:
            case NUMBER:
                q=Term(field);
                break;
            case LPAREN:
                jj_consume_token(LPAREN);
                q=Query(field);
                jj_consume_token(RPAREN);
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case CARAT:
                        jj_consume_token(CARAT);
                        boost=jj_consume_token(NUMBER);
                        break;
                    default:
                        jj_la1[5]=jj_gen; ;
                }
                break;
            default:
                jj_la1[6]=jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
        }
        if(boost!=null) {
            float f=(float) 1.0;
            try {
                // Silently ignore an unparsable boost value (keeps default boost).
                f=Float.valueOf(boost.image).floatValue();
                q.setBoost(f);
            }
            catch(Exception ignored) {}
        }
        {
            if(true) {
                return q;
            }
        }
        // Unreachable; emitted by the JavaCC generator to satisfy the compiler.
        throw new Error("Missing return statement in function");
    }


    /** Generated production for a single term: a plain/prefix/wildcard/fuzzy
     *  term or a quoted phrase, with optional ^boost and ~slop suffixes,
     *  expanded across all configured fields via the get*Query factories. */
    final public Query Term (String field) throws ParseException {
        Token term, boost=null, slop=null, goop1, goop2;
        boolean prefix=false;
        boolean wildcard=false;
        boolean fuzzy=false;
        boolean rangein=false;
        Query q;
        switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case TERM:
            case PREFIXTERM:
            case WILDTERM:
            case NUMBER:
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case TERM:
                        term=jj_consume_token(TERM);
                        break;
                    case PREFIXTERM:
                        term=jj_consume_token(PREFIXTERM);
                        prefix=true;
                        break;
                    case WILDTERM:
                        term=jj_consume_token(WILDTERM);
                        wildcard=true;
                        break;
                    case NUMBER:
                        term=jj_consume_token(NUMBER);
                        break;
                    default:
                        jj_la1[7]=jj_gen;
                        jj_consume_token(-1);
                        throw new ParseException();
                }
                // Optional fuzzy suffix (~), allowed before or after a ^boost.
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case FUZZY:
                        jj_consume_token(FUZZY);
                        fuzzy=true;
                        break;
                    default:
                        jj_la1[8]=jj_gen; ;
                }
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case CARAT:
                        jj_consume_token(CARAT);
                        boost=jj_consume_token(NUMBER);
                        switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                            case FUZZY:
                                jj_consume_token(FUZZY);
                                fuzzy=true;
                                break;
                            default:
                                jj_la1[9]=jj_gen; ;
                        }
                        break;
                    default:
                        jj_la1[10]=jj_gen; ;
                }
                // Build the multi-field query via the matching factory method.
                if(wildcard) {
                    q=getWildcardQuery(fields, term.image);
                }
                else if(prefix) {
                    q=getPrefixQuery(fields, term.image.substring(0, term.image.length()-1));
                }
                else if(fuzzy) {
                    q=getFuzzyQuery(fields, term.image);
                }
                else {
                    q=getFieldQuery(fields, analyzer, term.image);
                }
                break;
            case QUOTED:
                term=jj_consume_token(QUOTED);
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case SLOP:
                        slop=jj_consume_token(SLOP);
                        break;
                    default:
                        jj_la1[19]=jj_gen; ;
                }
                switch((jj_ntk==-1)?jj_ntk():jj_ntk) {
                    case CARAT:
                        jj_consume_token(CARAT);
                        boost=jj_consume_token(NUMBER);
                        break;
                    default:
                        jj_la1[20]=jj_gen; ;
                }
                // Strip the surrounding quotes before analyzing the phrase text.
                q=getFieldQuery(fields, analyzer, term.image.substring(1, term.image.length()-1));
                // NOTE(review): getFieldQuery wraps phrases inside a BooleanQuery,
                // so this instanceof check may never match and the ~slop would be
                // silently dropped -- verify against getFieldQuery's return type.
                if(slop!=null&&q instanceof PhraseQuery) {
                    try {
                        int s=Float.valueOf(slop.image.substring(1)).intValue();
                        ((PhraseQuery) q).setSlop(s);
                    }
                    catch(Exception ignored) {}
                }
                break;
            default:
                jj_la1[21]=jj_gen;
                jj_consume_token(-1);
                throw new ParseException();
        }
        if(boost!=null) {
            float f=(float) 1.0;
            try {
                f=Float.valueOf(boost.image).floatValue();
            }
            catch(Exception ignored) {
                /* Should this be handled somehow? (defaults to "no boost", if
                 * boost number is invalid)
                 */
            }
            // avoid boosting null queries, such as those caused by stop words
            if(q!=null) {
                q.setBoost(f);
            }
        }
        {
            if(true) {
                return q;
            }
        }
        // Unreachable; emitted by the JavaCC generator to satisfy the compiler.
        throw new Error("Missing return statement in function");
    }


    /** Generated lookahead driver for choice 1 (the "<TERM> :" field prefix);
     *  returns true when the next <code>xla</code> tokens match. */
    final private boolean jj_2_1 (int xla) {
        jj_la=xla;
        jj_lastpos=jj_scanpos=token;
        try {
            return!jj_3_1();
        }
        catch(LookaheadSuccess ls) {
            // Early-success signal thrown by jj_scan_token.
            return true;
        }
        finally {
            jj_save(0, xla);
        }
    }


    /** Generated lookahead body: succeeds when the input is TERM COLON. */
    final private boolean jj_3_1 () {
        if(jj_scan_token(TERM)) {
            return true;
        }
        if(jj_scan_token(COLON)) {
            return true;
        }
        return false;
    }


    // ---- Generated parser state (JavaCC boilerplate) ----

    public QueryParserTokenManager token_source;

    public Token token, jj_nt;

    // Kind of the next token, or -1 when it has not been fetched yet.
    private int jj_ntk;

    // Scan/last positions used during syntactic lookahead.
    private Token jj_scanpos, jj_lastpos;

    // Remaining lookahead distance.
    private int jj_la;

    public boolean lookingAhead=false;

    private boolean jj_semLA;

    private int jj_gen;

    // One slot per choice point; records the generation a choice last failed at.
    final private int[] jj_la1=new int[22];

    static private int[] jj_la1_0;

    static private int[] jj_la1_1;

    static {
        jj_la1_0();
        jj_la1_1();
    }


    // Bitmask table (token kinds 0-31) of tokens expected at each choice point.
    private static void jj_la1_0 () {
        jj_la1_0=new int[] {
        0x180, 0x180, 0xe00, 0xe00, 0x1f31f80, 0x8000, 0x1f31000, 0x1320000, 0x40000, 0x40000, 0x8000, 0x18000000
        , 0x2000000, 0x18000000, 0x8000, 0x80000000, 0x20000000, 0x80000000, 0x8000, 0x80000, 0x8000, 0x1f30000, };
    }


    // Bitmask table (token kinds 32+) of tokens expected at each choice point.
    private static void jj_la1_1 () {
        jj_la1_1=new int[] {
        0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, };
    }


    // Per-lookahead bookkeeping for the single syntactic lookahead (jj_2_1).
    final private JJCalls[] jj_2_rtns=new JJCalls[1];

    private boolean jj_rescan=false;

    private int jj_gc=0;

    /** Constructs the generated parser over a character stream. */
    public fulltextParser (CharStream stream) {
        token_source=new QueryParserTokenManager(stream);
        token=new Token();
        jj_ntk=-1;
        jj_gen=0;
        for(int i=0; i<22; i++) {
            jj_la1[i]=-1;
        }
        for(int i=0; i<jj_2_rtns.length; i++) {
            jj_2_rtns[i]=new JJCalls();
        }
    }


    /** Resets all generated parser state for a new character stream. */
    public void ReInit (CharStream stream) {
        token_source.ReInit(stream);
        token=new Token();
        jj_ntk=-1;
        jj_gen=0;
        for(int i=0; i<22; i++) {
            jj_la1[i]=-1;
        }
        for(int i=0; i<jj_2_rtns.length; i++) {
            jj_2_rtns[i]=new JJCalls();
        }
    }


    /** Constructs the generated parser over an existing token manager. */
    public fulltextParser (QueryParserTokenManager tm) {
        token_source=tm;
        token=new Token();
        jj_ntk=-1;
        jj_gen=0;
        for(int i=0; i<22; i++) {
            jj_la1[i]=-1;
        }
        for(int i=0; i<jj_2_rtns.length; i++) {
            jj_2_rtns[i]=new JJCalls();
        }
    }


    /** Resets all generated parser state for a new token manager. */
    public void ReInit (QueryParserTokenManager tm) {
        token_source=tm;
        token=new Token();
        jj_ntk=-1;
        jj_gen=0;
        for(int i=0; i<22; i++) {
            jj_la1[i]=-1;
        }
        for(int i=0; i<jj_2_rtns.length; i++) {
            jj_2_rtns[i]=new JJCalls();
        }
    }


    /** Consumes the next token, checking that it is of the expected kind;
     *  on mismatch, restores the position and throws a ParseException. */
    final private Token jj_consume_token (int kind) throws ParseException {
        Token oldToken;
        if((oldToken=token).next!=null) {
            token=token.next;
        }
        else {
            token=token.next=token_source.getNextToken();
        }
        jj_ntk=-1;
        if(token.kind==kind) {
            jj_gen++;
            // Periodically drop stale cached lookahead results.
            if(++jj_gc>100) {
                jj_gc=0;
                for(int i=0; i<jj_2_rtns.length; i++) {
                    JJCalls c=jj_2_rtns[i];
                    while(c!=null) {
                        if(c.gen<jj_gen) {
                            c.first=null;
                        }
                        c=c.next;
                    }
                }
            }
            return token;
        }
        token=oldToken;
        jj_kind=kind;
        throw generateParseException();
    }


    // Thrown internally to signal early success of a syntactic lookahead.
    static private final class LookaheadSuccess
    extends java.lang.Error {}


    final private LookaheadSuccess jj_ls=new LookaheadSuccess();

    /** Advances the lookahead scan position one token, checking its kind;
     *  returns true on mismatch, and throws jj_ls when the lookahead limit
     *  is reached successfully. */
    final private boolean jj_scan_token (int kind) {
        if(jj_scanpos==jj_lastpos) {
            jj_la--;
            if(jj_scanpos.next==null) {
                jj_lastpos=jj_scanpos=jj_scanpos.next=token_source.getNextToken();
            }
            else {
                jj_lastpos=jj_scanpos=jj_scanpos.next;
            }
        }
        else {
            jj_scanpos=jj_scanpos.next;
        }
        if(jj_rescan) {
            // While re-scanning for error reporting, record the failing position.
            int i=0;
            Token tok=token;
            while(tok!=null&&tok!=jj_scanpos) {
                i++;
                tok=tok.next;
            }
            if(tok!=null) {
                jj_add_error_token(kind, i);
            }
        }
        if(jj_scanpos.kind!=kind) {
            return true;
        }
        if(jj_la==0&&jj_scanpos==jj_lastpos) {
            throw jj_ls;
        }
        return false;
    }


    /** Returns the next token from the input and advances the parser. */
    final public Token getNextToken () {
        if(token.next!=null) {
            token=token.next;
        }
        else {
            token=token.next=token_source.getNextToken();
        }
        jj_ntk=-1;
        jj_gen++;
        return token;
    }


    /** Returns the token <code>index</code> positions ahead without consuming it. */
    final public Token getToken (int index) {
        Token t=lookingAhead?jj_scanpos:token;
        for(int i=0; i<index; i++) {
            if(t.next!=null) {
                t=t.next;
            }
            else {
                t=t.next=token_source.getNextToken();
            }
        }
        return t;
    }


    /** Returns (and caches in jj_ntk) the kind of the next token. */
    final private int jj_ntk () {
        if((jj_nt=token.next)==null) {
            return(jj_ntk=(token.next=token_source.getNextToken()).kind);
        }
        else {
            return(jj_ntk=jj_nt.kind);
        }
    }


    // Error-reporting state: sequences of token kinds that were expected at
    // the point of failure.
    private java.util.Vector jj_expentries=new java.util.Vector();

    private int[] jj_expentry;

    // Kind that jj_consume_token failed to match, or -1.
    private int jj_kind=-1;

    private int[] jj_lasttokens=new int[100];

    private int jj_endpos;

    /** Records a token kind at position <code>pos</code> of the current
     *  lookahead-failure sequence; flushes a completed sequence into
     *  jj_expentries, de-duplicating against earlier entries. */
    private void jj_add_error_token (int kind, int pos) {
        if(pos>=100) {
            return;
        }
        if(pos==jj_endpos+1) {
            jj_lasttokens[jj_endpos++]=kind;
        }
        else if(jj_endpos!=0) {
            jj_expentry=new int[jj_endpos];
            for(int i=0; i<jj_endpos; i++) {
                jj_expentry[i]=jj_lasttokens[i];
            }
            boolean exists=false;
            for(java.util.Enumeration e=jj_expentries.elements(); e.hasMoreElements(); ) {
                int[] oldentry=(int[]) (e.nextElement());
                if(oldentry.length==jj_expentry.length) {
                    exists=true;
                    for(int i=0; i<jj_expentry.length; i++) {
                        if(oldentry[i]!=jj_expentry[i]) {
                            exists=false;
                            break;
                        }
                    }
                    if(exists) {
                        break;
                    }
                }
            }
            if(!exists) {
                jj_expentries.addElement(jj_expentry);
            }
            if(pos!=0) {
                jj_lasttokens[(jj_endpos=pos)-1]=kind;
            }
        }
    }


    /** Builds a ParseException listing the token kinds that were legal at the
     *  current position, derived from the jj_la1 tables and any recorded
     *  lookahead failures. */
    public ParseException generateParseException () {
        jj_expentries.removeAllElements();
        boolean[] la1tokens=new boolean[33];
        for(int i=0; i<33; i++) {
            la1tokens[i]=false;
        }
        if(jj_kind>=0) {
            la1tokens[jj_kind]=true;
            jj_kind=-1;
        }
        // A choice point recorded at the current generation means its expected
        // tokens were legal right here; expand its bitmask into la1tokens.
        for(int i=0; i<22; i++) {
            if(jj_la1[i]==jj_gen) {
                for(int j=0; j<32; j++) {
                    if((jj_la1_0[i]&(1<<j))!=0) {
                        la1tokens[j]=true;
                    }
                    if((jj_la1_1[i]&(1<<j))!=0) {
                        la1tokens[32+j]=true;
                    }
                }
            }
        }
        for(int i=0; i<33; i++) {
            if(la1tokens[i]) {
                jj_expentry=new int[1];
                jj_expentry[0]=i;
                jj_expentries.addElement(jj_expentry);
            }
        }
        jj_endpos=0;
        jj_rescan_token();
        jj_add_error_token(0, 0);
        int[][] exptokseq=new int[jj_expentries.size()][];
        for(int i=0; i<jj_expentries.size(); i++) {
            exptokseq[i]=(int[]) jj_expentries.elementAt(i);
        }
        return new ParseException(token, exptokseq, tokenImage);
    }


    /** Tracing hooks are no-ops in non-debug generated parsers. */
    final public void enable_tracing () {
    }


    final public void disable_tracing () {
    }


    /** Replays recorded lookahead scans so that the expected-token lists in
     *  generateParseException() include lookahead information. */
    final private void jj_rescan_token () {
        jj_rescan=true;
        for(int i=0; i<1; i++) {
            JJCalls p=jj_2_rtns[i];
            do {
                if(p.gen>jj_gen) {
                    jj_la=p.arg;
                    jj_lastpos=jj_scanpos=p.first;
                    switch(i) {
                        case 0:
                            jj_3_1();
                            break;
                    }
                }
                p=p.next;
            }
            while(p!=null);
        }
        jj_rescan=false;
    }


    /** Records the outcome of a lookahead attempt so it can be replayed by
     *  jj_rescan_token() when building an error message. */
    final private void jj_save (int index, int xla) {
        JJCalls p=jj_2_rtns[index];
        while(p.gen>jj_gen) {
            if(p.next==null) {
                p=p.next=new JJCalls();
                break;
            }
            p=p.next;
        }
        p.gen=jj_gen+xla-jj_la;
        p.first=token;
        p.arg=xla;
    }


    /** Linked-list node recording one lookahead call (JavaCC boilerplate). */
    static final class JJCalls {
        int gen;

        Token first;

        int arg;

        JJCalls next;
    }

}

---------------------------------------------------------------------
To unsubscribe, e-mail: [EMAIL PROTECTED]
For additional commands, e-mail: [EMAIL PROTECTED]

Reply via email to