inspection code inspection optimization
style style Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/5928bb92 Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/5928bb92 Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/5928bb92 Branch: refs/heads/branch-0.1 Commit: 5928bb92fb3b3940d4a5e520f74b596e674228b1 Parents: 4a11f07 Author: Zhangshunyu <zhangshu...@huawei.com> Authored: Sun Sep 18 11:00:35 2016 +0800 Committer: ravipesala <ravi.pes...@gmail.com> Committed: Thu Sep 22 10:34:27 2016 +0530 ---------------------------------------------------------------------- .../compression/type/UnCompressDefaultLong.java | 4 +--- .../spark/rdd/CarbonDataLoadRDD.scala | 2 +- .../spark/rdd/CarbonDataRDDFactory.scala | 2 +- .../carbondata/spark/rdd/CarbonMergerRDD.scala | 6 ++--- .../carbondata/spark/util/CommonUtil.scala | 8 +++---- .../spark/util/GlobalDictionaryUtil.scala | 2 +- .../org/apache/spark/sql/CarbonSqlParser.scala | 24 ++++++++++---------- .../execution/command/carbonTableSchema.scala | 4 ++-- .../processing/mdkeygen/MDKeyGenStep.java | 8 ++----- .../util/CarbonDataProcessorUtil.java | 5 ++-- 10 files changed, 29 insertions(+), 36 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressDefaultLong.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressDefaultLong.java b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressDefaultLong.java index b30932c..a4d3b96 100644 --- a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressDefaultLong.java +++ 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressDefaultLong.java @@ -41,9 +41,7 @@ public class UnCompressDefaultLong extends UnCompressNoneLong { @Override public CarbonReadDataHolder getValues(int decimal, Object maxValueObject) { CarbonReadDataHolder dataHolder = new CarbonReadDataHolder(); long[] vals = new long[value.length]; - for (int i = 0; i < vals.length; i++) { - vals[i] = value[i]; - } + System.arraycopy(value, 0, vals, 0, vals.length); dataHolder.setReadableLongValues(vals); return dataHolder; } http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala index 60a2d00..86d58a5 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala +++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala @@ -175,7 +175,7 @@ class CarbonDataLoadRDD[K, V]( if(carbonUseLocalDir.equalsIgnoreCase("true")) { val storeLocations = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf) - if (null != storeLocations && storeLocations.length > 0) { + if (null != storeLocations && storeLocations.nonEmpty) { storeLocation = storeLocations(Random.nextInt(storeLocations.length)) } if (storeLocation == null) { http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala 
b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala index 31cc8ac..3118d3f 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala +++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala @@ -696,7 +696,7 @@ object CarbonDataRDDFactory extends Logging { ) var storeLocation = "" val configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf) - if (null != configuredStore && configuredStore.length > 0) { + if (null != configuredStore && configuredStore.nonEmpty) { storeLocation = configuredStore(Random.nextInt(configuredStore.length)) } if (storeLocation == null) { http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala index 54d7539..8136b35 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala +++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala @@ -319,9 +319,9 @@ class CarbonMergerRDD[K, V]( .add(new NodeInfo(blocksPerNode.getTaskId, blocksPerNode.getTableBlockInfoList.size)) }) if (list.size() != 0) { - result.add(new CarbonSparkPartition(id, i, Seq(entry._1).toArray, list)) - i += 1 - } + result.add(new CarbonSparkPartition(id, i, Seq(entry._1).toArray, list)) + i += 1 + } } // print the node info along with task and number of blocks for the task. 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala index 3d85c1a..67f06e3 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala +++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala @@ -34,7 +34,7 @@ object CommonUtil { if (noDictionaryDims.contains(x)) { throw new MalformedCarbonCommandException( "Column group is not supported for no dictionary columns:" + x) - } else if (msrs.filter { msr => msr.column.equals(x) }.size > 0) { + } else if (msrs.filter { msr => msr.column.equals(x) }.nonEmpty) { // if column is measure throw new MalformedCarbonCommandException("Column group is not supported for measures:" + x) } else if (foundIndExistingColGrp(x)) { @@ -47,7 +47,7 @@ object CommonUtil { "Column group doesn't support Timestamp datatype:" + x) } // if invalid column is present - else if (dims.filter { dim => dim.column.equalsIgnoreCase(x) }.size == 0) { + else if (dims.filter { dim => dim.column.equalsIgnoreCase(x) }.isEmpty) { throw new MalformedCarbonCommandException( "column in column group is not a valid column :" + x ) @@ -69,7 +69,7 @@ object CommonUtil { def isTimeStampColumn(colName: String, dims: Seq[Field]): Boolean = { dims.foreach { dim => if (dim.column.equalsIgnoreCase(colName)) { - if (None != dim.dataType && null != dim.dataType.get && + if (dim.dataType.isDefined && null != dim.dataType.get && "timestamp".equalsIgnoreCase(dim.dataType.get)) { return true } @@ -80,7 +80,7 @@ object CommonUtil { def isComplex(colName: String, dims: Seq[Field]): Boolean = { dims.foreach { x => - if (None != x.children && null != x.children.get 
&& x.children.get.size > 0) { + if (x.children.isDefined && null != x.children.get && x.children.get.nonEmpty) { val children = x.children.get if (x.column.equals(colName)) { return true http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala index 02b70d0..818aa4a 100644 --- a/integration/spark/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala +++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala @@ -596,7 +596,7 @@ object GlobalDictionaryUtil extends Logging { var columnName: String = "" var value: String = "" // such as "," , "", throw ex - if (tokens.size == 0) { + if (tokens.isEmpty) { logError("Read a bad dictionary record: " + x) accum += 1 } else if (tokens.size == 1) { http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala index 5675603..539c302 100644 --- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala +++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala @@ -363,7 +363,7 @@ class CarbonSqlParser() val dupColsGrp = cols.asScala.groupBy(x => x.getName) filter { case (_, colList) => colList.size > 1 } - if (dupColsGrp.size > 0) { + if (dupColsGrp.nonEmpty) { var columnName: String = "" dupColsGrp.toSeq.foreach(columnName += _._1 
+ ", ") columnName = columnName.substring(0, columnName.lastIndexOf(", ")) @@ -454,7 +454,7 @@ class CarbonSqlParser() catch { case ce: MalformedCarbonCommandException => val message = if (tableName.isEmpty) "Create table command failed. " - else if (!dbName.isDefined) s"Create table command failed for $tableName. " + else if (dbName.isEmpty) s"Create table command failed for $tableName. " else s"Create table command failed for ${dbName.get}.$tableName. " LOGGER.audit(message + ce.getMessage) throw ce @@ -515,7 +515,7 @@ class CarbonSqlParser() val (dims: Seq[Field], noDictionaryDims: Seq[String]) = extractDimColsAndNoDictionaryFields( fields, tableProperties) - if (dims.length == 0) { + if (dims.isEmpty) { throw new MalformedCarbonCommandException(s"Table ${dbName.getOrElse( CarbonCommonConstants.DATABASE_DEFAULT_NAME)}.$tableName" + " can not be created without key columns. Please use DICTIONARY_INCLUDE or " + @@ -554,7 +554,7 @@ class CarbonSqlParser() noDictionaryDims: Seq[String], msrs: Seq[Field], dims: Seq[Field]): Seq[String] = { - if (None != tableProperties.get(CarbonCommonConstants.COLUMN_GROUPS)) { + if (tableProperties.get(CarbonCommonConstants.COLUMN_GROUPS).isDefined) { var splittedColGrps: Seq[String] = Seq[String]() val nonSplitCols: String = tableProperties.get(CarbonCommonConstants.COLUMN_GROUPS).get @@ -603,7 +603,7 @@ class CarbonSqlParser() true } val colGrpNames: StringBuilder = StringBuilder.newBuilder - for (i <- 0 until colGrpFieldIndx.length) { + for (i <- colGrpFieldIndx.indices) { colGrpNames.append(dims(colGrpFieldIndx(i)).column) if (i < (colGrpFieldIndx.length - 1)) { colGrpNames.append(",") @@ -629,11 +629,11 @@ class CarbonSqlParser() var partitionClass: String = "" var partitionCount: Int = 1 var partitionColNames: Array[String] = Array[String]() - if (None != tableProperties.get(CarbonCommonConstants.PARTITIONCLASS)) { + if (tableProperties.get(CarbonCommonConstants.PARTITIONCLASS).isDefined) { partitionClass = 
tableProperties.get(CarbonCommonConstants.PARTITIONCLASS).get } - if (None != tableProperties.get(CarbonCommonConstants.PARTITIONCOUNT)) { + if (tableProperties.get(CarbonCommonConstants.PARTITIONCOUNT).isDefined) { try { partitionCount = tableProperties.get(CarbonCommonConstants.PARTITIONCOUNT).get.toInt } catch { @@ -684,14 +684,14 @@ class CarbonSqlParser() colPropMap: java.util.HashMap[String, java.util.List[ColumnProperty]]) { val (tblPropKey, colProKey) = getKey(parentColumnName, columnName) val colProps = CommonUtil.getColumnProperties(tblPropKey, tableProperties) - if (None != colProps) { + if (colProps.isDefined) { colPropMap.put(colProKey, colProps.get) } } def getKey(parentColumnName: Option[String], columnName: String): (String, String) = { - if (None != parentColumnName) { + if (parentColumnName.isDefined) { if (columnName == "val") { (parentColumnName.get, parentColumnName.get + "." + columnName) } else { @@ -881,13 +881,13 @@ class CarbonSqlParser() var dictExcludedCols: Array[String] = Array[String]() // get all included cols - if (None != tableProperties.get(CarbonCommonConstants.DICTIONARY_INCLUDE)) { + if (tableProperties.get(CarbonCommonConstants.DICTIONARY_INCLUDE).isDefined) { dictIncludedCols = tableProperties.get(CarbonCommonConstants.DICTIONARY_INCLUDE).get.split(',').map(_.trim) } // get all excluded cols - if (None != tableProperties.get(CarbonCommonConstants.DICTIONARY_EXCLUDE)) { + if (tableProperties.get(CarbonCommonConstants.DICTIONARY_EXCLUDE).isDefined) { dictExcludedCols = tableProperties.get(CarbonCommonConstants.DICTIONARY_EXCLUDE).get.split(',').map(_.trim) } @@ -1038,7 +1038,7 @@ class CarbonSqlParser() case (_, optionlist) => optionlist.size > 1 } val duplicates = StringBuilder.newBuilder - if (duplicateOptions.size > 0) { + if (duplicateOptions.nonEmpty) { duplicateOptions.foreach(x => { duplicates.append(x._1) } 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala ---------------------------------------------------------------------- diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala index 2047872..e4a79ab 100644 --- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala +++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala @@ -211,7 +211,7 @@ class TableNewProcessor(cm: tableModel, sqlContext: SQLContext) { encoders.add(Encoding.DIRECT_DICTIONARY) } val colPropMap = new java.util.HashMap[String, String]() - if (None != cm.colProps && null != cm.colProps.get.get(colName)) { + if (cm.colProps.isDefined && null != cm.colProps.get.get(colName)) { val colProps = cm.colProps.get.get(colName) colProps.asScala.foreach { x => colPropMap.put(x.key, x.value) } } @@ -1071,7 +1071,7 @@ private[sql] case class LoadTable( carbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema) var storeLocation = "" val configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf) - if (null != configuredStore && configuredStore.length > 0) { + if (null != configuredStore && configuredStore.nonEmpty) { storeLocation = configuredStore(Random.nextInt(configuredStore.length)) } if (storeLocation == null) { http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/processing/src/main/java/org/apache/carbondata/processing/mdkeygen/MDKeyGenStep.java ---------------------------------------------------------------------- diff --git a/processing/src/main/java/org/apache/carbondata/processing/mdkeygen/MDKeyGenStep.java b/processing/src/main/java/org/apache/carbondata/processing/mdkeygen/MDKeyGenStep.java index 
ada5cc8..1f883dc 100644 --- a/processing/src/main/java/org/apache/carbondata/processing/mdkeygen/MDKeyGenStep.java +++ b/processing/src/main/java/org/apache/carbondata/processing/mdkeygen/MDKeyGenStep.java @@ -302,9 +302,7 @@ public class MDKeyGenStep extends BaseStep { int simpleDimsCount = this.dimensionCount - meta.getComplexDimsCount(); int[] simpleDimsLen = new int[simpleDimsCount]; - for (int i = 0; i < simpleDimsCount; i++) { - simpleDimsLen[i] = dimLens[i]; - } + System.arraycopy(dimLens, 0, simpleDimsLen, 0, simpleDimsCount); CarbonTable carbonTable = CarbonMetadata.getInstance() .getCarbonTable(meta.getDatabaseName() + CarbonCommonConstants.UNDERSCORE + tableName); @@ -352,9 +350,7 @@ public class MDKeyGenStep extends BaseStep { private void initDataHandler() { int simpleDimsCount = this.dimensionCount - meta.getComplexDimsCount(); int[] simpleDimsLen = new int[simpleDimsCount]; - for (int i = 0; i < simpleDimsCount; i++) { - simpleDimsLen[i] = dimLens[i]; - } + System.arraycopy(dimLens, 0, simpleDimsLen, 0, simpleDimsCount); CarbonDataFileAttributes carbonDataFileAttributes = new CarbonDataFileAttributes(meta.getTaskNo(), meta.getFactTimeStamp()); initAggType(); http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5928bb92/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java ---------------------------------------------------------------------- diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java index 9168556..680d730 100644 --- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java +++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java @@ -231,8 +231,7 @@ public final class CarbonDataProcessorUtil { .append(CarbonCommonConstants.HASH_SPC_CHARACTER); } String loadNames = - 
builder.substring(0, builder.lastIndexOf(CarbonCommonConstants.HASH_SPC_CHARACTER)) - .toString(); + builder.substring(0, builder.lastIndexOf(CarbonCommonConstants.HASH_SPC_CHARACTER)); return loadNames; } @@ -247,7 +246,7 @@ public final class CarbonDataProcessorUtil { .append(CarbonCommonConstants.HASH_SPC_CHARACTER); } String modOrDelTimesStamp = - builder.substring(0, builder.indexOf(CarbonCommonConstants.HASH_SPC_CHARACTER)).toString(); + builder.substring(0, builder.indexOf(CarbonCommonConstants.HASH_SPC_CHARACTER)); return modOrDelTimesStamp; }