Resolved compilation and test failures after merging from master.
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/c05523d0 Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/c05523d0 Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/c05523d0 Branch: refs/heads/branch-1.1 Commit: c05523d0df68f618ca36b0ef4cca8bd92c4d0239 Parents: 02f06fd Author: ravipesala <ravi.pes...@gmail.com> Authored: Thu Jun 15 17:18:49 2017 +0530 Committer: ravipesala <ravi.pes...@gmail.com> Committed: Thu Jun 15 17:18:49 2017 +0530 ---------------------------------------------------------------------- .../schema/table/column/CarbonDimension.java | 2 +- .../executer/RangeValueFilterExecuterImpl.java | 2 +- ...wLevelRangeLessThanEqualFilterExecuterImpl.java | 2 +- .../RowLevelRangeLessThanFiterExecuterImpl.java | 2 +- .../AbstractDetailQueryResultIterator.java | 4 +--- .../testsuite/dataload/TestBatchSortDataLoad.scala | 6 +++--- .../testsuite/dataload/TestLoadDataFrame.scala | 4 ++-- .../spark/rdd/CarbonDataRDDFactory.scala | 3 +-- .../spark/rdd/CarbonDataRDDFactory.scala | 17 ----------------- .../apache/spark/sql/common/util/QueryTest.scala | 5 +++-- pom.xml | 4 ---- .../processing/newflow/DataLoadProcessBuilder.java | 1 - .../newflow/sort/unsafe/UnsafeSortDataRows.java | 13 +++++++++++++ 13 files changed, 27 insertions(+), 38 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java index 8d02512..23f4d6c 100644 --- 
a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java +++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java @@ -125,7 +125,7 @@ public class CarbonDimension extends CarbonColumn { * @return is column participated in sorting or not */ public boolean isSortColumn() { - return this.columnSchema.isSortColumn(); + return !isComplex(); } /** http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java index 6823531..12661d2 100644 --- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java +++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java @@ -556,7 +556,7 @@ public class RangeValueFilterExecuterImpl extends ValueBasedFilterExecuterImpl { CarbonDimension currentBlockDimension = segmentProperties.getDimensions().get(dimensionBlocksIndex); defaultValue = FilterUtil.getMaskKey(key, currentBlockDimension, - this.segmentProperties.getSortColumnsGenerator()); + this.segmentProperties.getDimensionKeyGenerator()); } else { defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY; } http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java 
b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java index d694960..eaf58a4 100644 --- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java +++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java @@ -155,7 +155,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter CarbonDimension currentBlockDimension = segmentProperties.getDimensions().get(dimensionBlocksIndex[0]); defaultValue = FilterUtil.getMaskKey(key, currentBlockDimension, - this.segmentProperties.getSortColumnsGenerator()); + this.segmentProperties.getDimensionKeyGenerator()); } BitSet bitSet = null; if (dimensionColumnDataChunk.isExplicitSorted()) { http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java index b3dd921..e9b6408 100644 --- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java +++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFiterExecuterImpl.java @@ -156,7 +156,7 @@ public class RowLevelRangeLessThanFiterExecuterImpl extends RowLevelFilterExecut CarbonDimension currentBlockDimension = segmentProperties.getDimensions().get(dimensionBlocksIndex[0]); defaultValue = FilterUtil.getMaskKey(key, currentBlockDimension, - this.segmentProperties.getSortColumnsGenerator()); + this.segmentProperties.getDimensionKeyGenerator()); } BitSet bitSet = null; if 
(dimensionColumnDataChunk.isExplicitSorted()) { http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java index 92e9594..4839cb5 100644 --- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java +++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java @@ -115,9 +115,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato private void intialiseInfos() { for (BlockExecutionInfo blockInfo : blockExecutionInfos) { Map<String, DeleteDeltaVo> deletedRowsMap = null; - DataRefNodeFinder finder = new BTreeDataRefNodeFinder(blockInfo.getEachColumnValueSize(), - blockInfo.getDataBlock().getSegmentProperties().getNumberOfSortColumns(), - blockInfo.getDataBlock().getSegmentProperties().getNumberOfNoDictSortColumns()); + DataRefNodeFinder finder = new BTreeDataRefNodeFinder(blockInfo.getEachColumnValueSize()); // if delete delta file is present if (null != blockInfo.getDeleteDeltaFilePath() && 0 != blockInfo .getDeleteDeltaFilePath().length) { http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala index d53b5e5..af59cde 100644 --- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala +++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala @@ -86,7 +86,7 @@ class TestBatchSortDataLoad extends QueryTest with BeforeAndAfterAll { checkAnswer(sql("select count(*) from carbon_load1"), Seq(Row(100000))) - assert(getIndexfileCount("carbon_load1") == 5, "Something wrong in batch sort") + assert(getIndexfileCount("carbon_load1") == 6, "Something wrong in batch sort") } test("test batch sort load by passing option to load command and compare with normal load") { @@ -167,7 +167,7 @@ class TestBatchSortDataLoad extends QueryTest with BeforeAndAfterAll { checkAnswer(sql("select count(*) from carbon_load3"), Seq(Row(100000))) - assert(getIndexfileCount("carbon_load3") == 5, "Something wrong in batch sort") + assert(getIndexfileCount("carbon_load3") == 6, "Something wrong in batch sort") checkAnswer(sql("select * from carbon_load3 where c1='a1' order by c1"), sql("select * from carbon_load2 where c1='a1' order by c1")) @@ -188,7 +188,7 @@ class TestBatchSortDataLoad extends QueryTest with BeforeAndAfterAll { checkAnswer(sql("select count(*) from carbon_load4"), Seq(Row(100000))) - assert(getIndexfileCount("carbon_load4") == 5, "Something wrong in batch sort") + assert(getIndexfileCount("carbon_load4") == 6, "Something wrong in batch sort") CarbonProperties.getInstance(). 
addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT) http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala ---------------------------------------------------------------------- diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala index 9179c08..994acf6 100644 --- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala +++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala @@ -171,7 +171,7 @@ class TestLoadDataFrame extends QueryTest with BeforeAndAfterAll { test("test load dataframe with single pass enabled") { // save dataframe to carbon file - df.write + df2.write .format("carbondata") .option("tableName", "carbon8") .option("tempCSV", "false") @@ -186,7 +186,7 @@ class TestLoadDataFrame extends QueryTest with BeforeAndAfterAll { test("test load dataframe with single pass disabled") { // save dataframe to carbon file - df.write + df2.write .format("carbondata") .option("tableName", "carbon9") .option("tempCSV", "true") http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala index dfea7d7..f282f69 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala +++ 
b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala @@ -48,9 +48,8 @@ import org.apache.carbondata.core.mutate.CarbonUpdateUtil import org.apache.carbondata.core.statusmanager.LoadMetadataDetails import org.apache.carbondata.core.util.CarbonProperties import org.apache.carbondata.core.util.path.CarbonStorePath -import org.apache.carbondata.processing.csvload.BlockDetails import org.apache.carbondata.processing.constants.LoggerAction -import org.apache.carbondata.processing.csvload.{BlockDetails, CSVInputFormat, StringArrayWritable} +import org.apache.carbondata.processing.csvload.BlockDetails import org.apache.carbondata.processing.etl.DataLoadingException import org.apache.carbondata.processing.merger.{CarbonCompactionUtil, CarbonDataMergerUtil, CompactionType} import org.apache.carbondata.processing.model.CarbonLoadModel http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala index 96a8062..124036c 100644 --- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala +++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala @@ -752,23 +752,6 @@ object CarbonDataRDDFactory { } - def loadDataForPartitionTable(): Unit = { - try { - val rdd = repartitionInputData(sqlContext, dataFrame, carbonLoadModel) - status = new PartitionTableDataLoaderRDD(sqlContext.sparkContext, - new DataLoadResultImpl(), - carbonLoadModel, - currentLoadCount, - tableCreationTime, - schemaLastUpdatedTime, - rdd).collect() - } catch { - case ex: Exception => - LOGGER.error(ex, "load data failed for partition 
table") - throw ex - } - } - if (!updateModel.isDefined) { CarbonLoaderUtil.checkAndCreateCarbonDataLocation(storePath, currentLoadCount.toString, carbonTable) http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala ---------------------------------------------------------------------- diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala index c37ea1e..be91df8 100644 --- a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala +++ b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala @@ -23,7 +23,7 @@ import scala.collection.JavaConversions._ import org.apache.spark.sql.catalyst.plans._ import org.apache.spark.sql.catalyst.util._ -import org.apache.spark.sql.hive.HiveExternalCatalog +import org.apache.spark.sql.hive.{CarbonSessionState, HiveExternalCatalog} import org.apache.spark.sql.test.TestQueryExecutor import org.apache.spark.sql.{DataFrame, Row} @@ -40,7 +40,8 @@ class QueryTest extends PlanTest { val sqlContext = TestQueryExecutor.INSTANCE.sqlContext - val hiveClient = sqlContext.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client + val hiveClient = sqlContext.sparkSession.sessionState.asInstanceOf[CarbonSessionState] + .metadataHive val resourcesPath = TestQueryExecutor.resourcesPath http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index f2c3aa7..3ee15bc 100644 --- a/pom.xml +++ b/pom.xml @@ -128,10 +128,6 @@ <id>pentaho-releases</id> <url>http://repository.pentaho.org/artifactory/repo/</url> </repository> - <repository> - <id>carbondata-releases</id> - <url>http://136.243.101.176:9091/repository/carbondata/</url> - </repository> </repositories> 
<dependencyManagement> http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java ---------------------------------------------------------------------- diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java index 5c7c035..a94abd3 100644 --- a/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java +++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java @@ -36,7 +36,6 @@ import org.apache.carbondata.core.util.CarbonProperties; import org.apache.carbondata.processing.model.CarbonLoadModel; import org.apache.carbondata.processing.newflow.constants.DataLoadProcessorConstants; import org.apache.carbondata.processing.newflow.sort.SortScopeOptions; -import org.apache.carbondata.processing.newflow.steps.CarbonRowDataWriterProcessorStepImpl; import org.apache.carbondata.processing.newflow.steps.DataConverterProcessorStepImpl; import org.apache.carbondata.processing.newflow.steps.DataConverterProcessorWithBucketingStepImpl; import org.apache.carbondata.processing.newflow.steps.DataWriterBatchProcessorStepImpl; http://git-wip-us.apache.org/repos/asf/carbondata/blob/c05523d0/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/unsafe/UnsafeSortDataRows.java ---------------------------------------------------------------------- diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/unsafe/UnsafeSortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/unsafe/UnsafeSortDataRows.java index b4daa51..8872dd4 100644 --- a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/unsafe/UnsafeSortDataRows.java +++ 
b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/unsafe/UnsafeSortDataRows.java @@ -198,6 +198,19 @@ public class UnsafeSortDataRows { /** * This method will be used to add new row + * + * @param rowBatch new rowBatch + * @throws CarbonSortKeyAndGroupByException problem while writing + */ + public void addRowBatchWithOutSync(Object[][] rowBatch, int size) + throws CarbonSortKeyAndGroupByException { + // if record holder list size is equal to sort buffer size then it will + // sort the list and then write current list data to file + addBatch(rowBatch, size); + } + + /** + * This method will be used to add new row */ public void addRow(Object[] row) throws CarbonSortKeyAndGroupByException { // if record holder list size is equal to sort buffer size then it will