GitHub user chenliang613 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1286#discussion_r138534203
  
    --- Diff: integration/hive/src/test/java/org/apache/carbondata/hive/CarbonHiveRecordReaderTest.java ---
    @@ -0,0 +1,234 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.carbondata.hive;
    +
    +import java.io.IOException;
    +import java.util.ArrayList;
    +import java.util.List;
    +import java.util.concurrent.ExecutorService;
    +import java.util.concurrent.ForkJoinPool;
    +
    +import org.apache.carbondata.common.CarbonIterator;
    +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
    +import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
    +import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
    +import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
    +import org.apache.carbondata.core.scan.executor.impl.DetailQueryExecutor;
    +import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
    +import org.apache.carbondata.core.scan.model.QueryModel;
    +import org.apache.carbondata.core.scan.result.BatchResult;
    +import org.apache.carbondata.core.scan.result.iterator.AbstractDetailQueryResultIterator;
    +import org.apache.carbondata.core.scan.result.iterator.ChunkRowIterator;
    +import org.apache.carbondata.core.scan.result.iterator.DetailQueryResultIterator;
    +import org.apache.carbondata.hadoop.readsupport.CarbonReadSupport;
    +
    +import mockit.Mock;
    +import mockit.MockUp;
    +import org.apache.hadoop.fs.Path;
    +import org.apache.hadoop.hive.serde.serdeConstants;
    +import org.apache.hadoop.io.ArrayWritable;
    +import org.apache.hadoop.io.IntWritable;
    +import org.apache.hadoop.io.Text;
    +import org.apache.hadoop.io.Writable;
    +import org.apache.hadoop.mapred.InputSplit;
    +import org.apache.hadoop.mapred.JobConf;
    +import org.junit.Assert;
    +import org.junit.BeforeClass;
    +import org.junit.Test;
    +
    +public class CarbonHiveRecordReaderTest {
    +    private static CarbonHiveRecordReader carbonHiveRecordReaderObj;
    +    private static AbsoluteTableIdentifier absoluteTableIdentifier;
    +    private static QueryModel queryModel = new QueryModel();
    +    private static CarbonReadSupport<ArrayWritable> readSupport = new CarbonDictionaryDecodeReadSupport<>();
    +    private static JobConf jobConf = new JobConf();
    +    private static InputSplit inputSplitNotInstanceOfHiveInputSplit, inputSplitInstanceOfHiveInputSplit;
    +    private static BatchResult batchResult = new BatchResult();
    +    private static Writable writable;
    +    private static CarbonIterator carbonIteratorObject;
    +
    +    @BeforeClass
    +    public static void setUp() throws Exception {
    +        String array[] = {"neha", "01", "vaishali"};
    +        writable = new ArrayWritable(array);
    +        absoluteTableIdentifier = new AbsoluteTableIdentifier(
    +                "carbondata/examples/spark2/target/store",
    --- End diff --
    
    Can you please explain why this path, "carbondata/examples/spark2/target/store", is used for the hive test module?

