KYLIN-2242 fix the bug that partition column max/min values couldn't be parsed


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a86569a5
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a86569a5
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a86569a5

Branch: refs/heads/spark-it
Commit: a86569a5c0b49f5a607f642a70af7a826fda3edf
Parents: 571b791
Author: shaofengshi <shaofeng...@apache.org>
Authored: Tue Jan 24 15:59:19 2017 +0800
Committer: shaofengshi <shaofeng...@apache.org>
Committed: Tue Jan 24 15:59:38 2017 +0800

----------------------------------------------------------------------
 .../mr/steps/UpdateCubeInfoAfterBuildStep.java  | 20 +++++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a86569a5/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
index 79fe657..2efd718 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
@@ -18,7 +18,9 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -79,6 +81,10 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
 
     private void updateTimeRange(CubeSegment segment) throws IOException {
        final TblColRef partitionCol = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
+
+        if (partitionCol == null) {
+            return;
+        }
        final String factColumnsInputPath = this.getParams().get(BatchConstants.CFG_OUTPUT_PATH);
        Path colDir = new Path(factColumnsInputPath, partitionCol.getIdentity());
         FileSystem fs = HadoopUtil.getWorkingFileSystem();
@@ -88,15 +94,19 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
         }
 
         FSDataInputStream is = null;
-        long minValue = Long.MAX_VALUE, maxValue = Long.MIN_VALUE;
+        BufferedReader bufferedReader = null;
+        InputStreamReader isr = null;
+        long minValue, maxValue;
         try {
             is = fs.open(outputFile);
-            long min = is.readLong();
-            long max = is.readLong();
-            minValue = Math.min(min, minValue);
-            maxValue = Math.max(max, maxValue);
+            isr = new InputStreamReader(is);
+            bufferedReader = new BufferedReader(isr);
+            minValue = Long.parseLong(bufferedReader.readLine());
+            maxValue = Long.parseLong(bufferedReader.readLine());
         } finally {
             IOUtils.closeQuietly(is);
+            IOUtils.closeQuietly(isr);
+            IOUtils.closeQuietly(bufferedReader);
         }
        logger.info("updateTimeRange step. minValue:" + minValue + " maxValue:" + maxValue);
         segment.setDateRangeStart(minValue);

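A note on the fix (not part of the patch): the fact-columns output stores the partition column's min and max as text lines, so the old binary readLong() calls reinterpreted raw ASCII bytes as a long. Below is a minimal self-contained sketch of that mismatch, assuming the file holds two newline-separated date values; the class name and sample values are hypothetical.

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class PartitionRangeParseDemo {
        public static void main(String[] args) throws IOException {
            // Hypothetical file content: min and max stored as two text lines.
            byte[] fileBytes = "20130101\n20131231\n".getBytes(StandardCharsets.UTF_8);

            // Old behavior: readLong() consumes 8 raw bytes and interprets them
            // as a big-endian long, so the ASCII text "20130101" comes back as a
            // meaningless number rather than the value 20130101.
            try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(fileBytes))) {
                System.out.println(dis.readLong()); // prints a garbage value
            }

            // Fixed behavior: read each line as text and parse it with
            // Long.parseLong, mirroring the BufferedReader code in the patch.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(new ByteArrayInputStream(fileBytes), StandardCharsets.UTF_8))) {
                long minValue = Long.parseLong(reader.readLine()); // 20130101
                long maxValue = Long.parseLong(reader.readLine()); // 20131231
                System.out.println("min=" + minValue + " max=" + maxValue);
            }
        }
    }

The patch closes the stream, reader, and buffered reader with IOUtils.closeQuietly in a finally block; on Java 7+ a try-with-resources block, as in the sketch, achieves the same cleanup.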