Author: xuefu
Date: Wed Dec 17 16:28:58 2014
New Revision: 1646290

URL: http://svn.apache.org/r1646290
Log:
HIVE-9059: Remove wrappers for SparkJobInfo and SparkStageInfo [Spark Branch]

Removed:
    
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/status/HiveSparkJobInfo.java
    
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/status/HiveSparkStageInfo.java
Modified:
    hive/branches/spark/pom.xml
    
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
    
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/KryoMessageCodec.java

Modified: hive/branches/spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/pom.xml?rev=1646290&r1=1646289&r2=1646290&view=diff
==============================================================================
--- hive/branches/spark/pom.xml (original)
+++ hive/branches/spark/pom.xml Wed Dec 17 16:28:58 2014
@@ -154,7 +154,7 @@
     <ST4.version>4.0.4</ST4.version>
     <tez.version>0.5.2</tez.version>
     <super-csv.version>2.2.0</super-csv.version>
-    <spark.version>1.2.0-SNAPSHOT</spark.version>
+    <spark.version>1.2.1-SNAPSHOT</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
     <scala.version>2.10.4</scala.version>
     <tempus-fugit.version>1.1</tempus-fugit.version>

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java?rev=1646290&r1=1646289&r2=1646290&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java Wed Dec 17 16:28:58 2014
@@ -32,8 +32,6 @@ import org.apache.hive.spark.client.Job;
 import org.apache.hive.spark.client.JobContext;
 import org.apache.hive.spark.client.JobHandle;
 import org.apache.hive.spark.client.SparkClient;
-import org.apache.hive.spark.client.status.HiveSparkJobInfo;
-import org.apache.hive.spark.client.status.HiveSparkStageInfo;
 import org.apache.spark.JobExecutionStatus;
 import org.apache.spark.SparkJobInfo;
 import org.apache.spark.SparkStageInfo;
@@ -158,7 +156,7 @@ public class RemoteSparkJobStatus implem
         };
       }
     }
-    JobHandle<HiveSparkJobInfo> getJobInfo = sparkClient.submit(
+    JobHandle<SparkJobInfo> getJobInfo = sparkClient.submit(
         new GetJobInfoJob(jobHandle.getClientJobId(), sparkJobId));
     try {
       return getJobInfo.get();
@@ -169,7 +167,7 @@ public class RemoteSparkJobStatus implem
   }
 
   private SparkStageInfo getSparkStageInfo(int stageId) {
-    JobHandle<HiveSparkStageInfo> getStageInfo = sparkClient.submit(new GetStageInfoJob(stageId));
+    JobHandle<SparkStageInfo> getStageInfo = sparkClient.submit(new GetStageInfoJob(stageId));
     try {
       return getStageInfo.get();
     } catch (Throwable t) {
@@ -178,7 +176,7 @@ public class RemoteSparkJobStatus implem
     }
   }
 
-  private static class GetJobInfoJob implements Job<HiveSparkJobInfo> {
+  private static class GetJobInfoJob implements Job<SparkJobInfo> {
     private final String clientJobId;
     private final int sparkJobId;
 
@@ -193,7 +191,7 @@ public class RemoteSparkJobStatus implem
     }
 
     @Override
-    public HiveSparkJobInfo call(JobContext jc) throws Exception {
+    public SparkJobInfo call(JobContext jc) throws Exception {
       SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId);
       if (jobInfo == null) {
         List<JavaFutureAction<?>> list = jc.getMonitoredJobs().get(clientJobId);
@@ -237,11 +235,11 @@ public class RemoteSparkJobStatus implem
           }
         };
       }
-      return new HiveSparkJobInfo(jobInfo);
+      return jobInfo;
     }
   }
 
-  private static class GetStageInfoJob implements Job<HiveSparkStageInfo> {
+  private static class GetStageInfoJob implements Job<SparkStageInfo> {
     private final int stageId;
 
     private GetStageInfoJob() {
@@ -254,9 +252,8 @@ public class RemoteSparkJobStatus implem
     }
 
     @Override
-    public HiveSparkStageInfo call(JobContext jc) throws Exception {
-      SparkStageInfo stageInfo = jc.sc().statusTracker().getStageInfo(stageId);
-      return stageInfo != null ? new HiveSparkStageInfo(stageInfo) : new HiveSparkStageInfo();
+    public SparkStageInfo call(JobContext jc) throws Exception {
+      return jc.sc().statusTracker().getStageInfo(stageId);
     }
   }
 

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/KryoMessageCodec.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/KryoMessageCodec.java?rev=1646290&r1=1646289&r2=1646290&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/KryoMessageCodec.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/KryoMessageCodec.java Wed Dec 17 16:28:58 2014
@@ -26,6 +26,7 @@ import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.ByteBufferInputStream;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
+import com.esotericsoftware.shaded.org.objenesis.strategy.StdInstantiatorStrategy;
 import com.google.common.base.Preconditions;
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
@@ -57,6 +58,7 @@ class KryoMessageCodec extends ByteToMes
         kryo.register(klass, REG_ID_BASE + count);
         count++;
       }
+      kryo.setInstantiatorStrategy(new StdInstantiatorStrategy());
       return kryo;
     }
   };


Reply via email to