package org.apache.lucene.index;

import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.codecs.Codec;
// Codec imports for other Lucene versions, kept for reference:
//   org.apache.lucene.codecs.lucene50.Lucene50Codec
//   org.apache.lucene.codecs.lucene54.Lucene54Codec
//   org.apache.lucene.codecs.lucene70.Lucene70Codec
//   org.apache.lucene.codecs.lucene80.Lucene80Codec
import org.apache.lucene.codecs.lucene62.Lucene62Codec;
import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;

import java.io.IOException;
import java.util.Random;

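/**
 * Verifies that per-position payloads survive a round trip through the index:
 * the same term in different documents must come back with its own payload,
 * under both the SimpleText and Lucene62 codecs.
 */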
public class TestPayloads extends LuceneTestCase {

    public void testPayloadsEncodingWithSimpleTextCodec() throws Exception {
        Directory dir = newDirectory();
        performTest(dir, true);
        dir.close();
    }

    public void testPayloadsEncodingWithLucene62Codec() throws Exception {
        Directory dir = newDirectory();
        performTest(dir, false);
        dir.close();
    }


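    /**
     * Indexes two documents that share the term "a", each token carrying a random
     * float payload, then reads the postings back and asserts that the decoded
     * payloads differ across documents.
     */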
    private void performTest(Directory dir, boolean isSimpleTextCodec) throws Exception {
        PayloadAnalyzer analyzer = new PayloadAnalyzer();

        final IndexWriterConfig config = new IndexWriterConfig(analyzer);
        Codec codec = isSimpleTextCodec ? new SimpleTextCodec() : new Lucene62Codec();

        config.setCodec(codec);
        config.setOpenMode(OpenMode.CREATE);
        config.setMergePolicy(newLogMergePolicy());

        IndexWriter writer = new IndexWriter(dir, config);

        int numTerms = 1;
        final String fieldName = "f1";

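        // Index two documents that both contain the term "a", so its postings
        // list spans two documents with independently generated payloads.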
        Document d1 = new Document();
        d1.add(newTextField(fieldName, "a b", Field.Store.YES));
        writer.addDocument(d1);

        Document d2 = new Document();
        d2.add(newTextField(fieldName, "a", Field.Store.YES));
        writer.addDocument(d2);

        writer.commit();
        writer.forceMerge(1);
        writer.close();

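        // Read the index back and walk the positions of each query term.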
        IndexReader reader = DirectoryReader.open(dir);

        Term[] terms = new Term[numTerms];
        terms[0] = new Term(fieldName, "a");

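        // Remember the last decoded payload so we can assert that payloads
        // differ between documents.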
        float previousDecodedPayload = 0f;

        PostingsEnum[] tps = new PostingsEnum[numTerms];

        for (int termIndex = 0; termIndex < numTerms; termIndex++) {
            // For Lucene 8.3+, replace MultiFields.getTermPositionsEnum(...)
            // with MultiTerms.getTermPostingsEnum(...).
            tps[termIndex] = MultiFields.getTermPositionsEnum(reader,
                    terms[termIndex].field(),
                    new BytesRef(terms[termIndex].text()));

            int docCounter = 0;
            while (tps[termIndex].nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
                for (int position = 0; position < tps[termIndex].freq(); position++) {
                    System.out.print("termIndex " + termIndex + " in doc: " + (docCounter) + " position: " + position + " term: `" + terms[termIndex].text() + "` freq: " + tps[termIndex].freq() + " payload: ");
                    tps[termIndex].nextPosition();
                    BytesRef payload = tps[termIndex].getPayload();
                    if (payload != null) {
                        // Decode starting at payload.offset: the BytesRef may point into a shared buffer.
                        float decodedPayload = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
                        System.out.println("value: " + decodedPayload);
                        if (previousDecodedPayload == decodedPayload) {
                            fail("Payloads at the same position in different documents should have different values: "
                                    + decodedPayload + " vs. " + previousDecodedPayload);
                        }
                        previousDecodedPayload = decodedPayload;
                    } else {
                        System.out.println("null");
                    }
                }
                docCounter++;
            }
        }
        reader.close();
    }

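    /** Analyzer that splits on whitespace and attaches a random float payload to every token. */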
    private static class PayloadAnalyzer extends Analyzer {

        public PayloadAnalyzer() {
            super(PER_FIELD_REUSE_STRATEGY);
        }

        @Override
        public TokenStreamComponents createComponents(String fieldName) {
            Tokenizer source = new WhitespaceTokenizer();
            RandomPayloadFilter spf = new RandomPayloadFilter(source);
            return new TokenStreamComponents(source, spf);
        }
    }


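    /** TokenFilter that sets a freshly encoded random float in [1, 100) as the payload of each token. */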
    private static class RandomPayloadFilter extends TokenFilter {

        private final Random r = new Random();

        private final PayloadAttribute payloadAttr = addAttribute(PayloadAttribute.class);

        private final int min = 1;
        private final int max = 100;

        public RandomPayloadFilter(TokenStream input) {
            super(input);
        }

        @Override
        public final boolean incrementToken() throws IOException {
            // TokenStreams are consumed by a single thread, so no synchronization is needed.
            if (input.incrementToken()) {
                // Attach a random float in [min, max), encoded as a 4-byte payload.
                float random = min + r.nextFloat() * (max - min);
                byte[] scorePayload = PayloadHelper.encodeFloat(random);
                payloadAttr.setPayload(new BytesRef(scorePayload));
                return true;
            }
            return false;
        }

    }

}
