Author: xuefu
Date: Tue Nov 25 12:46:37 2014
New Revision: 1641612

URL: http://svn.apache.org/r1641612
Log:
HIVE-8951: Spark remote context doesn't work with local-cluster [Spark Branch]

Modified:
    hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1641612&r1=1641611&r2=1641612&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Tue Nov 25 12:46:37 2014
@@ -48,8 +48,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 class SparkClientImpl implements SparkClient {
+  private static final long serialVersionUID = 1L;
 
-  private final static Logger LOG = LoggerFactory.getLogger(SparkClientImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkClientImpl.class);
+  
+  private static final String DEFAULT_CONNECTION_TIMEOUT = "60"; // In seconds
 
   private final Map<String, String> conf;
   private final AtomicInteger childIdGenerator;
@@ -68,8 +71,8 @@ class SparkClientImpl implements SparkCl
     this.jobs = Maps.newConcurrentMap();
     this.driverThread = startDriver();
 
-    long connectTimeout = Integer.parseInt(
-        Optional.fromNullable(conf.get("spark.client.connectTimeout")).or("10")) * 1000;
+    long connectTimeout = 1000 * Integer.parseInt(
+        Optional.fromNullable(conf.get("spark.client.connectTimeout")).or(DEFAULT_CONNECTION_TIMEOUT));
     long endTime = System.currentTimeMillis() + connectTimeout;
 
     synchronized (this) {
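
For illustration, here is a minimal standalone sketch of the timeout lookup that the hunk above rewrites: the configured "spark.client.connectTimeout" (in seconds) is read from the conf map, defaults to 60 seconds when absent, and is converted to milliseconds. The class ConnectTimeoutExample and its main method are hypothetical and exist only for this sketch; the property key, the Guava Optional.fromNullable(...).or(...) pattern, and the 60-second default follow the patch.

import java.util.HashMap;
import java.util.Map;

import com.google.common.base.Optional;

// Hypothetical example class; not part of SparkClientImpl.
public class ConnectTimeoutExample {

  // Default connect timeout in seconds, mirroring DEFAULT_CONNECTION_TIMEOUT in the patch.
  private static final String DEFAULT_CONNECTION_TIMEOUT = "60";

  // Returns the configured "spark.client.connectTimeout" in milliseconds,
  // falling back to the 60-second default when the key is not set.
  static long connectTimeoutMs(Map<String, String> conf) {
    return 1000L * Integer.parseInt(
        Optional.fromNullable(conf.get("spark.client.connectTimeout"))
            .or(DEFAULT_CONNECTION_TIMEOUT));
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<String, String>();
    System.out.println(connectTimeoutMs(conf));   // 60000 (default)

    conf.put("spark.client.connectTimeout", "10");
    System.out.println(connectTimeoutMs(conf));   // 10000 (explicit setting)
  }
}

Multiplying by 1000 up front (as the patch does) keeps the arithmetic in long form before the conversion, and pulling the default into a named constant makes the fallback visible at a glance.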
