Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
 Tue Oct 14 19:06:45 2014
@@ -22,17 +22,10 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hive.service.cli.thrift.TCLIService;
-import org.apache.hive.service.cli.thrift.TCLIService.Iface;
-import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.http.protocol.BasicHttpContext;
 import org.apache.http.protocol.HttpContext;
-import org.apache.thrift.TProcessor;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.transport.TTransport;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
 import org.ietf.jgss.GSSManager;
@@ -48,11 +41,7 @@ public final class HttpAuthUtils {
   public static final String AUTHORIZATION = "Authorization";
   public static final String BASIC = "Basic";
   public static final String NEGOTIATE = "Negotiate";
-
-  public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) 
{
-    return new HttpCLIServiceProcessorFactory(service);
-  }
-
+  
   /**
    * @return Stringified Base64 encoded kerberosAuthHeader on success
    */
@@ -62,7 +51,7 @@ public final class HttpAuthUtils {
     String serverPrincipal = getServerPrincipal(principal, host);
     // Uses the Ticket Granting Ticket in the UserGroupInformation
     return clientUGI.doAs(
-      new HttpKerberosClientAction(serverPrincipal, 
clientUGI.getShortUserName(), serverHttpUrl));
+      new HttpKerberosClientAction(serverPrincipal, clientUGI.getUserName(), 
serverHttpUrl));
   }
 
   /**
@@ -87,26 +76,6 @@ public final class HttpAuthUtils {
     throw new UnsupportedOperationException("Can't initialize class");
   }
 
-  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory 
{
-
-    private final ThriftCLIService service;
-    private final HiveConf hiveConf;
-    private final boolean isDoAsEnabled;
-
-    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      hiveConf = service.getHiveConf();
-      isDoAsEnabled = 
hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
-      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) : 
baseProcessor;
-    }
-  }
-
   public static class HttpKerberosClientAction implements 
PrivilegedExceptionAction<String> {
 
     public static final String HTTP_RESPONSE = "HTTP_RESPONSE";

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
 Tue Oct 14 19:06:45 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hive.service.auth;
 
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Processor;
 import org.apache.hive.service.cli.thrift.TCLIService;
 import org.apache.hive.service.cli.thrift.TCLIService.Iface;
 import org.apache.thrift.TException;
@@ -43,7 +42,7 @@ import org.slf4j.LoggerFactory;
  */
 public class TSetIpAddressProcessor<I extends Iface> extends 
TCLIService.Processor<Iface> {
 
-  private static final Logger LOGGER = 
LoggerFactory.getLogger(Processor.class.getName());
+  private static final Logger LOGGER = 
LoggerFactory.getLogger(TSetIpAddressProcessor.class.getName());
 
   public TSetIpAddressProcessor(Iface iface) {
     super(iface);
@@ -75,7 +74,7 @@ public class TSetIpAddressProcessor<I ex
     if (tSocket == null) {
       LOGGER.warn("Unknown Transport, cannot determine ipAddress");
     } else {
-      
THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().toString());
+      
THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().getHostAddress());
     }
   }
 

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/CLIService.java 
(original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/CLIService.java 
Tue Oct 14 19:06:45 2014
@@ -44,6 +44,7 @@ import org.apache.hive.service.auth.Hive
 import org.apache.hive.service.cli.operation.Operation;
 import org.apache.hive.service.cli.session.SessionManager;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
+import org.apache.hive.service.server.HiveServer2;
 
 /**
  * CLIService.
@@ -64,15 +65,18 @@ public class CLIService extends Composit
   private SessionManager sessionManager;
   private UserGroupInformation serviceUGI;
   private UserGroupInformation httpUGI;
+  // The HiveServer2 instance running this service
+  private final HiveServer2 hiveServer2;
 
-  public CLIService() {
-    super("CLIService");
+  public CLIService(HiveServer2 hiveServer2) {
+    super(CLIService.class.getSimpleName());
+    this.hiveServer2 = hiveServer2;
   }
 
   @Override
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
-    sessionManager = new SessionManager();
+    sessionManager = new SessionManager(hiveServer2);
     addService(sessionManager);
     //  If the hadoop cluster is secure, do a kerberos login for the service 
from the keytab
     if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
@@ -470,4 +474,8 @@ public class CLIService extends Composit
     sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, 
tokenStr);
     LOG.info(sessionHandle  + ": renewDelegationToken()");
   }
+
+  public SessionManager getSessionManager() {
+    return sessionManager;
+  }
 }

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
 Tue Oct 14 19:06:45 2014
@@ -47,7 +47,7 @@ public class OperationManager extends Ab
       new HashMap<OperationHandle, Operation>();
 
   public OperationManager() {
-    super("OperationManager");
+    super(OperationManager.class.getSimpleName());
   }
 
   @Override

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
 Tue Oct 14 19:06:45 2014
@@ -166,15 +166,20 @@ public class HiveSessionImpl implements 
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      if (hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION) != null) {
-        String hiverc = 
hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION)
-            + File.separator + SessionManager.HIVERCFILE;
-        if (new File(hiverc).exists()) {
-          LOG.info("Running global init file: " + hiverc);
-          int rc = processor.processFile(hiverc);
+      String hiverc = 
hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION);
+      if (hiverc != null) {
+        File hivercFile = new File(hiverc);
+        if (hivercFile.isDirectory()) {
+          hivercFile = new File(hivercFile, SessionManager.HIVERCFILE);
+        }
+        if (hivercFile.isFile()) {
+          LOG.info("Running global init file: " + hivercFile);
+          int rc = processor.processFile(hivercFile.getAbsolutePath());
           if (rc != 0) {
-            LOG.warn("Failed on initializing global .hiverc file");
+            LOG.error("Failed on initializing global .hiverc file");
           }
+        } else {
+          LOG.debug("Global init file " + hivercFile + " does not exist");
         }
       }
     } catch (IOException e) {

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/SessionManager.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
 Tue Oct 14 19:06:45 2014
@@ -43,6 +43,7 @@ import org.apache.hive.service.cli.HiveS
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.operation.OperationManager;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
+import org.apache.hive.service.server.HiveServer2;
 import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
 
 /**
@@ -65,9 +66,12 @@ public class SessionManager extends Comp
   private long sessionTimeout;
 
   private volatile boolean shutdown;
+  // The HiveServer2 instance running this service
+  private final HiveServer2 hiveServer2;
 
-  public SessionManager() {
-    super("SessionManager");
+  public SessionManager(HiveServer2 hiveServer2) {
+    super(SessionManager.class.getSimpleName());
+    this.hiveServer2 = hiveServer2;
   }
 
   @Override
@@ -229,6 +233,23 @@ public class SessionManager extends Comp
     return openSession(protocol, username, password, ipAddress, sessionConf, 
false, null);
   }
 
+  /**
+   * Opens a new session and creates a session handle.
+   * The username passed to this method is the effective username.
+   * If withImpersonation is true (==doAs true) we wrap all the calls in 
HiveSession
+   * within a UGI.doAs, where UGI corresponds to the effective user.
+   * @see org.apache.hive.service.cli.thrift.ThriftCLIService#getUserName()
+   *
+   * @param protocol
+   * @param username
+   * @param password
+   * @param ipAddress
+   * @param sessionConf
+   * @param withImpersonation
+   * @param delegationToken
+   * @return
+   * @throws HiveSQLException
+   */
   public SessionHandle openSession(TProtocolVersion protocol, String username, 
String password, String ipAddress,
       Map<String, String> sessionConf, boolean withImpersonation, String 
delegationToken)
           throws HiveSQLException {
@@ -271,6 +292,24 @@ public class SessionManager extends Comp
       throw new HiveSQLException("Session does not exist!");
     }
     session.close();
+    // Shutdown HiveServer2 if it has been deregistered from ZooKeeper and has 
no active sessions
+    if (!(hiveServer2 == null) && 
(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY))
+        && (!hiveServer2.isRegisteredWithZooKeeper())) {
+      // Asynchronously shutdown this instance of HiveServer2,
+      // if there are no active client sessions
+      if (getOpenSessionCount() == 0) {
+        LOG.info("This instance of HiveServer2 has been removed from the list 
of server "
+            + "instances available for dynamic service discovery. "
+            + "The last client session has ended - will shutdown now.");
+        Thread shutdownThread = new Thread() {
+          @Override
+          public void run() {
+            hiveServer2.stop();
+          }
+        };
+        shutdownThread.start();
+      }
+    }
   }
 
   public HiveSession getSession(SessionHandle sessionHandle) throws 
HiveSQLException {
@@ -356,5 +395,8 @@ public class SessionManager extends Comp
     return backgroundOperationPool.submit(r);
   }
 
+  public int getOpenSessionCount() {
+    return handleToSession.size();
+  }
 }
 

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
 Tue Oct 14 19:06:45 2014
@@ -30,7 +30,7 @@ import org.apache.hive.service.cli.ICLIS
 public class EmbeddedThriftBinaryCLIService extends ThriftBinaryCLIService {
 
   public EmbeddedThriftBinaryCLIService() {
-    super(new CLIService());
+    super(new CLIService(null));
     isEmbedded = true;
     HiveConf.setLoadHiveServer2Config(true);
     cliService.init(new HiveConf());

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
 Tue Oct 14 19:06:45 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hive.service.cli.thrift;
 
-import java.net.InetSocketAddress;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -40,72 +39,54 @@ import org.apache.thrift.transport.TTran
 public class ThriftBinaryCLIService extends ThriftCLIService {
 
   public ThriftBinaryCLIService(CLIService cliService) {
-    super(cliService, "ThriftBinaryCLIService");
+    super(cliService, ThriftBinaryCLIService.class.getSimpleName());
   }
 
   @Override
   public void run() {
     try {
-      hiveAuthFactory = new HiveAuthFactory(hiveConf);
-      TTransportFactory  transportFactory = 
hiveAuthFactory.getAuthTransFactory();
-      TProcessorFactory processorFactory = 
hiveAuthFactory.getAuthProcFactory(this);
-
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
-      }
-
-      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
-      if (hiveHost == null) {
-        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
-      }
-
-      if (hiveHost != null && !hiveHost.isEmpty()) {
-        serverAddress = new InetSocketAddress(hiveHost, portNum);
-      } else {
-        serverAddress = new  InetSocketAddress(portNum);
-      }
-
-      minWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
-      maxWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
-      workerKeepAliveTime = hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, 
TimeUnit.SECONDS);
+      // Server thread pool
       String threadPoolName = "HiveServer2-Handler-Pool";
       ExecutorService executorService = new 
ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
           workerKeepAliveTime, TimeUnit.SECONDS, new 
SynchronousQueue<Runnable>(),
           new ThreadFactoryWithGarbageCleanup(threadPoolName));
 
+      // Thrift configs
+      hiveAuthFactory = new HiveAuthFactory(hiveConf);
+      TTransportFactory transportFactory = 
hiveAuthFactory.getAuthTransFactory();
+      TProcessorFactory processorFactory = 
hiveAuthFactory.getAuthProcFactory(this);
       TServerSocket serverSocket = null;
       if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
         serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum);
       } else {
         String keyStorePath = 
hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
         if (keyStorePath.isEmpty()) {
-          throw new 
IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
-              " Not configured for SSL connection");
+          throw new 
IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
         }
         String keyStorePassword = 
ShimLoader.getHadoopShims().getPassword(hiveConf,
             HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
-        serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum,
-            keyStorePath, keyStorePassword);
+        serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum, 
keyStorePath,
+            keyStorePassword);
       }
+
+      // Server args
       TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
-      .processorFactory(processorFactory)
-      .transportFactory(transportFactory)
-      .protocolFactory(new TBinaryProtocol.Factory())
-      .executorService(executorService);
+          
.processorFactory(processorFactory).transportFactory(transportFactory)
+          .protocolFactory(new 
TBinaryProtocol.Factory()).executorService(executorService);
 
+      // TCP Server
       server = new TThreadPoolServer(sargs);
-
-      LOG.info("ThriftBinaryCLIService listening on " + serverAddress);
-
       server.serve();
-
+      String msg = "Started " + ThriftBinaryCLIService.class.getSimpleName() + 
" on port "
+          + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + 
" worker threads";
+      LOG.info(msg);
     } catch (Throwable t) {
-      LOG.error("Error: ", t);
+      LOG.fatal(
+          "Error starting HiveServer2: could not start "
+              + ThriftBinaryCLIService.class.getSimpleName(), t);
+      System.exit(-1);
     }
-
   }
+
 }

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
 Tue Oct 14 19:06:45 2014
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 
 import javax.security.auth.login.LoginException;
 
@@ -34,6 +35,7 @@ import org.apache.hive.service.auth.Hive
 import org.apache.hive.service.auth.TSetIpAddressProcessor;
 import org.apache.hive.service.cli.*;
 import org.apache.hive.service.cli.session.SessionManager;
+import org.apache.hive.service.server.HiveServer2;
 import org.apache.thrift.TException;
 import org.apache.thrift.server.TServer;
 
@@ -48,9 +50,11 @@ public abstract class ThriftCLIService e
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new 
TStatus(TStatusCode.SUCCESS_STATUS);
   private static final TStatus ERROR_STATUS = new 
TStatus(TStatusCode.ERROR_STATUS);
+  protected static HiveAuthFactory hiveAuthFactory;
 
   protected int portNum;
   protected InetSocketAddress serverAddress;
+  protected String hiveHost;
   protected TServer server;
   protected org.eclipse.jetty.server.Server httpServer;
 
@@ -63,8 +67,6 @@ public abstract class ThriftCLIService e
   protected int maxWorkerThreads;
   protected long workerKeepAliveTime;
 
-  protected static HiveAuthFactory hiveAuthFactory;
-
   public ThriftCLIService(CLIService cliService, String serviceName) {
     super(serviceName);
     this.cliService = cliService;
@@ -73,6 +75,43 @@ public abstract class ThriftCLIService e
   @Override
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
+
+    // Initialize common server configs needed in both binary & http modes
+    String portString;
+    hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
+    if (hiveHost == null) {
+      hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+    }
+    // HTTP mode
+    if (HiveServer2.isHTTPTransportMode(hiveConf)) {
+      workerKeepAliveTime =
+          
hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME,
+              TimeUnit.SECONDS);
+      portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
+      if (portString != null) {
+        portNum = Integer.valueOf(portString);
+      } else {
+        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
+      }
+    }
+    // Binary mode
+    else {
+      workerKeepAliveTime =
+          
hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, 
TimeUnit.SECONDS);
+      portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
+      if (portString != null) {
+        portNum = Integer.valueOf(portString);
+      } else {
+        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
+      }
+    }
+    if (hiveHost != null && !hiveHost.isEmpty()) {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    } else {
+      serverAddress = new InetSocketAddress(portNum);
+    }
+    minWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
+    maxWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
     super.init(hiveConf);
   }
 
@@ -105,6 +144,14 @@ public abstract class ThriftCLIService e
     super.stop();
   }
 
+  public int getPortNumber() {
+    return portNum;
+  }
+
+  public InetSocketAddress getServerAddress() {
+    return serverAddress;
+  }
+
   @Override
   public TGetDelegationTokenResp GetDelegationToken(TGetDelegationTokenReq req)
       throws TException {
@@ -214,6 +261,16 @@ public abstract class ThriftCLIService e
     return clientIpAddress;
   }
 
+  /**
+   * Returns the effective username.
+   * 1. If hive.server2.allow.user.substitution = false: the username of the 
connecting user
+   * 2. If hive.server2.allow.user.substitution = true: the username of the 
end user,
+   * that the connecting user is trying to proxy for.
+   * This includes a check whether the connecting user is allowed to proxy for 
the end user.
+   * @param req
+   * @return
+   * @throws HiveSQLException
+   */
   private String getUserName(TOpenSessionReq req) throws HiveSQLException {
     String userName = null;
     // Kerberos
@@ -590,6 +647,4 @@ public abstract class ThriftCLIService e
     return 
cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
         .equalsIgnoreCase(HiveAuthFactory.AuthTypes.KERBEROS.toString());
   }
-
 }
-

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
 Tue Oct 14 19:06:45 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes;
 import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
 import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
 import org.apache.thrift.TProcessor;
 import org.apache.thrift.TProcessorFactory;
@@ -48,100 +49,93 @@ import org.eclipse.jetty.util.thread.Exe
 public class ThriftHttpCLIService extends ThriftCLIService {
 
   public ThriftHttpCLIService(CLIService cliService) {
-    super(cliService, "ThriftHttpCLIService");
+    super(cliService, ThriftHttpCLIService.class.getSimpleName());
   }
 
+  /**
+   * Configure Jetty to serve http requests. Example of a client connection 
URL:
+   * http://localhost:10000/servlets/thrifths2/ A gateway may cause actual 
target URL to differ,
+   * e.g. http://gateway:port/hive2/servlets/thrifths2/
+   */
   @Override
   public void run() {
     try {
-      // Configure Jetty to serve http requests
-      // Example of a client connection URL: 
http://localhost:10000/servlets/thrifths2/
-      // a gateway may cause actual target URL to differ, e.g. 
http://gateway:port/hive2/servlets/thrifths2/
-
+      // Verify config validity
       verifyHttpConfiguration(hiveConf);
 
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
-      }
-
-      minWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS);
-      maxWorkerThreads = 
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS);
-      workerKeepAliveTime = hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME, 
TimeUnit.SECONDS);
-
-      String httpPath =  
getHttpPath(hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
-
+      // HTTP Server
       httpServer = new org.eclipse.jetty.server.Server();
+
+      // Server thread pool
       String threadPoolName = "HiveServer2-HttpHandler-Pool";
       ExecutorService executorService = new 
ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
           workerKeepAliveTime, TimeUnit.SECONDS, new 
LinkedBlockingQueue<Runnable>(),
           new ThreadFactoryWithGarbageCleanup(threadPoolName));
-
       ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
       httpServer.setThreadPool(threadPool);
 
-      SelectChannelConnector connector = new SelectChannelConnector();;
+      // Connector configs
+      SelectChannelConnector connector = new SelectChannelConnector();
       boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
       String schemeName = useSsl ? "https" : "http";
-      String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
-      // Set during the init phase of HiveServer2 if auth mode is kerberos
-      // UGI for the hive/_HOST (kerberos) principal
-      UserGroupInformation serviceUGI = cliService.getServiceUGI();
-      // UGI for the http/_HOST (SPNego) principal
-      UserGroupInformation httpUGI = cliService.getHttpUGI();
-
+      // Change connector if SSL is used
       if (useSsl) {
         String keyStorePath = 
hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
         String keyStorePassword = 
ShimLoader.getHadoopShims().getPassword(hiveConf,
             HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
         if (keyStorePath.isEmpty()) {
-          throw new 
IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
-              " Not configured for SSL connection");
+          throw new 
IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
         }
         SslContextFactory sslContextFactory = new SslContextFactory();
         sslContextFactory.setKeyStorePath(keyStorePath);
         sslContextFactory.setKeyStorePassword(keyStorePassword);
         connector = new SslSelectChannelConnector(sslContextFactory);
       }
-      
       connector.setPort(portNum);
       // Linux:yes, Windows:no
       connector.setReuseAddress(!Shell.WINDOWS);
-      
-      int maxIdleTime = (int) hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME, 
TimeUnit.MILLISECONDS);
+      int maxIdleTime = (int) 
hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME,
+          TimeUnit.MILLISECONDS);
       connector.setMaxIdleTime(maxIdleTime);
-      
+
       httpServer.addConnector(connector);
 
+      // Thrift configs
       hiveAuthFactory = new HiveAuthFactory(hiveConf);
-      TProcessorFactory processorFactory = 
hiveAuthFactory.getAuthProcFactory(this);
-      TProcessor processor = processorFactory.getProcessor(null);
-
+      TProcessor processor = new TCLIService.Processor<Iface>(this);
       TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+      // Set during the init phase of HiveServer2 if auth mode is kerberos
+      // UGI for the hive/_HOST (kerberos) principal
+      UserGroupInformation serviceUGI = cliService.getServiceUGI();
+      // UGI for the http/_HOST (SPNego) principal
+      UserGroupInformation httpUGI = cliService.getHttpUGI();
+      String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
+      TServlet thriftHttpServlet = new ThriftHttpServlet(processor, 
protocolFactory, authType,
+          serviceUGI, httpUGI);
 
-      TServlet thriftHttpServlet = new ThriftHttpServlet(processor, 
protocolFactory,
-          authType, serviceUGI, httpUGI);
-
+      // Context handler
       final ServletContextHandler context = new ServletContextHandler(
           ServletContextHandler.SESSIONS);
       context.setContextPath("/");
-
+      String httpPath = getHttpPath(hiveConf
+          .getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
       httpServer.setHandler(context);
       context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
 
       // TODO: check defaults: maxTimeout, keepalive, maxBodySize, 
bodyRecieveDuration, etc.
+      // Finally, start the server
       httpServer.start();
-      String msg = "Started ThriftHttpCLIService in " + schemeName + " mode on 
port " + portNum +
-          " path=" + httpPath +
-          " with " + minWorkerThreads + ".." + maxWorkerThreads + " worker 
threads";
+      String msg = "Started " + ThriftHttpCLIService.class.getSimpleName() + " 
in " + schemeName
+          + " mode on port " + portNum + " path=" + httpPath + " with " + 
minWorkerThreads + "..."
+          + maxWorkerThreads + " worker threads";
       LOG.info(msg);
       httpServer.join();
     } catch (Throwable t) {
-      LOG.error("Error: ", t);
+      LOG.fatal(
+          "Error starting HiveServer2: could not start "
+              + ThriftHttpCLIService.class.getSimpleName(), t);
+      System.exit(-1);
     }
   }
 
@@ -191,7 +185,8 @@ public class ThriftHttpCLIService extend
       // NONE in case of thrift mode uses SASL
       LOG.warn(ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting to " +
           authType + ". SASL is not supported with http transport mode," +
-          " so using equivalent of " + AuthTypes.NOSASL);
+ " so using equivalent of "
+          + AuthTypes.NOSASL);
     }
   }
 

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
 Tue Oct 14 19:06:45 2014
@@ -31,6 +31,8 @@ import org.apache.commons.codec.binary.B
 import org.apache.commons.codec.binary.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.service.auth.AuthenticationProviderFactory;
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
@@ -219,7 +221,7 @@ public class ThriftHttpServlet extends T
               "provided by the client.");
         }
         else {
-          return getPrincipalWithoutRealm(gssContext.getSrcName().toString());
+          return 
getPrincipalWithoutRealmAndHost(gssContext.getSrcName().toString());
         }
       }
       catch (GSSException e) {
@@ -236,9 +238,32 @@ public class ThriftHttpServlet extends T
       }
     }
 
-    private String getPrincipalWithoutRealm(String fullPrincipal) {
-      String names[] = fullPrincipal.split("[@]");
-      return names[0];
+    private String getPrincipalWithoutRealm(String fullPrincipal)
+        throws HttpAuthenticationException {
+      KerberosNameShim fullKerberosName;
+      try {
+        fullKerberosName = 
ShimLoader.getHadoopShims().getKerberosNameShim(fullPrincipal);
+      } catch (IOException e) {
+        throw new HttpAuthenticationException(e);
+      }
+      String serviceName = fullKerberosName.getServiceName();
+      String hostName = fullKerberosName.getHostName();
+      String principalWithoutRealm = serviceName;
+      if (hostName != null) {
+        principalWithoutRealm = serviceName + "/" + hostName;
+      }
+      return principalWithoutRealm;
+    }
+
+    private String getPrincipalWithoutRealmAndHost(String fullPrincipal)
+        throws HttpAuthenticationException {
+      KerberosNameShim fullKerberosName;
+      try {
+        fullKerberosName = 
ShimLoader.getHadoopShims().getKerberosNameShim(fullPrincipal);
+      } catch (IOException e) {
+        throw new HttpAuthenticationException(e);
+      }
+      return fullKerberosName.getServiceName();
     }
   }
 

Modified: 
hive/branches/llap/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/java/org/apache/hive/service/server/HiveServer2.java
 (original)
+++ 
hive/branches/llap/service/src/java/org/apache/hive/service/server/HiveServer2.java
 Tue Oct 14 19:06:45 2014
@@ -18,6 +18,16 @@
 
 package org.apache.hive.service.server;
 
+import java.nio.charset.Charset;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.LogUtils;
@@ -25,12 +35,20 @@ import org.apache.hadoop.hive.common.Log
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
+import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper;
 import org.apache.hive.common.util.HiveStringUtils;
+import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.service.CompositeService;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooKeeper;
 
 /**
  * HiveServer2.
@@ -41,31 +59,142 @@ public class HiveServer2 extends Composi
 
   private CLIService cliService;
   private ThriftCLIService thriftCLIService;
+  private String znodePath;
+  private ZooKeeper zooKeeperClient;
+  private boolean registeredWithZooKeeper = false;
 
   public HiveServer2() {
-    super("HiveServer2");
+    super(HiveServer2.class.getSimpleName());
     HiveConf.setLoadHiveServer2Config(true);
   }
 
 
   @Override
   public synchronized void init(HiveConf hiveConf) {
-    cliService = new CLIService();
+    cliService = new CLIService(this);
     addService(cliService);
+    if (isHTTPTransportMode(hiveConf)) {
+      thriftCLIService = new ThriftHttpCLIService(cliService);
+    } else {
+      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    }
+    addService(thriftCLIService);
+    super.init(hiveConf);
+
+    // Add a shutdown hook for catching SIGTERM & SIGINT
+    final HiveServer2 hiveServer2 = this;
+    Runtime.getRuntime().addShutdownHook(new Thread() {
+      @Override
+      public void run() {
+        hiveServer2.stop();
+      }
+    });
+  }
 
+  public static boolean isHTTPTransportMode(HiveConf hiveConf) {
     String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
-    if(transportMode == null) {
+    if (transportMode == null) {
       transportMode = 
hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
     }
-    if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
-      thriftCLIService = new ThriftHttpCLIService(cliService);
+    if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
+      return true;
     }
-    else {
-      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    return false;
+  }
+
+  /**
+   * Adds a server instance to ZooKeeper as a znode.
+   *
+   * @param hiveConf
+   * @throws Exception
+   */
+  private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws 
Exception {
+    int zooKeeperSessionTimeout =
+        hiveConf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
+    String zooKeeperEnsemble = ZooKeeperHiveHelper.getQuorumServers(hiveConf);
+    String rootNamespace = 
hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
+    String instanceURI = getServerInstanceURI(hiveConf);
+    byte[] znodeDataUTF8 = instanceURI.getBytes(Charset.forName("UTF-8"));
+    zooKeeperClient =
+        new ZooKeeper(zooKeeperEnsemble, zooKeeperSessionTimeout,
+            new ZooKeeperHiveHelper.DummyWatcher());
+
+    // Create the parent znodes recursively; ignore if the parent already 
exists
+    try {
+      ZooKeeperHiveHelper.createPathRecursively(zooKeeperClient, rootNamespace,
+          Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+      LOG.info("Created the root name space: " + rootNamespace + " on 
ZooKeeper for HiveServer2");
+    } catch (KeeperException e) {
+      if (e.code() != KeeperException.Code.NODEEXISTS) {
+        LOG.fatal("Unable to create HiveServer2 namespace: " + rootNamespace + 
" on ZooKeeper", e);
+        throw (e);
+      }
+    }
+    // Create a znode under the rootNamespace parent for this instance of the 
server
+    // Znode name: 
serverUri=host:port;version=versionInfo;sequence=sequenceNumber
+    try {
+      String znodePath =
+          ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace
+              + ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + "serverUri=" + 
instanceURI + ";"
+              + "version=" + HiveVersionInfo.getVersion() + ";" + "sequence=";
+      znodePath =
+          zooKeeperClient.create(znodePath, znodeDataUTF8, Ids.OPEN_ACL_UNSAFE,
+              CreateMode.EPHEMERAL_SEQUENTIAL);
+      setRegisteredWithZooKeeper(true);
+      // Set a watch on the znode
+      if (zooKeeperClient.exists(znodePath, new DeRegisterWatcher()) == null) {
+        // No node exists, throw exception
+        throw new Exception("Unable to create znode for this HiveServer2 
instance on ZooKeeper.");
+      }
+      LOG.info("Created a znode on ZooKeeper for HiveServer2 uri: " + 
instanceURI);
+    } catch (KeeperException e) {
+      LOG.fatal("Unable to create a znode for this server instance", e);
+      throw new Exception(e);
     }
+  }
 
-    addService(thriftCLIService);
-    super.init(hiveConf);
+  /**
+   * The watcher class which sets the de-register flag when the znode 
corresponding to this server
+   * instance is deleted. Additionally, it shuts down the server if there are 
no more active client
+   * sessions at the time of receiving a 'NodeDeleted' notification from 
ZooKeeper.
+   */
+  private class DeRegisterWatcher implements Watcher {
+    public void process(WatchedEvent event) {
+      if (event.getType().equals(Watcher.Event.EventType.NodeDeleted)) {
+        HiveServer2.this.setRegisteredWithZooKeeper(false);
+        // If there are no more active client sessions, stop the server
+        if (cliService.getSessionManager().getOpenSessionCount() == 0) {
+          LOG.warn("This instance of HiveServer2 has been removed from the 
list of server "
+              + "instances available for dynamic service discovery. "
+              + "The last client session has ended - will shut down now.");
+          HiveServer2.this.stop();
+        }
+        LOG.warn("This HiveServer2 instance is now de-registered from 
ZooKeeper. "
+            + "The server will be shut down after the last client session 
completes.");
+      }
+    }
+  }
+
+  private void removeServerInstanceFromZooKeeper() throws Exception {
+    setRegisteredWithZooKeeper(false);
+    zooKeeperClient.close();
+    LOG.info("Server instance removed from ZooKeeper.");
+  }
+
+  public boolean isRegisteredWithZooKeeper() {
+    return registeredWithZooKeeper;
+  }
+
+  private void setRegisteredWithZooKeeper(boolean registeredWithZooKeeper) {
+    this.registeredWithZooKeeper = registeredWithZooKeeper;
+  }
+
+  private String getServerInstanceURI(HiveConf hiveConf) throws Exception {
+    if ((thriftCLIService == null) || (thriftCLIService.getServerAddress() == 
null)) {
+      throw new Exception("Unable to get the server address; it hasn't been 
initialized yet.");
+    }
+    return thriftCLIService.getServerAddress().getHostName() + ":"
+        + thriftCLIService.getPortNumber();
   }
 
   @Override
@@ -75,23 +204,33 @@ public class HiveServer2 extends Composi
 
   @Override
   public synchronized void stop() {
-    super.stop();
-    // there should already be an instance of the session pool manager.
-    // if not, ignoring is fine while stopping the hive server.
+    LOG.info("Shutting down HiveServer2");
     HiveConf hiveConf = this.getHiveConf();
+    super.stop();
+    // Remove this server instance from ZooKeeper if dynamic service discovery 
is set
+    if 
(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY)) {
+      try {
+        removeServerInstanceFromZooKeeper();
+      } catch (Exception e) {
+        LOG.error("Error removing znode for this HiveServer2 instance from 
ZooKeeper.", e);
+      }
+    }
+    // There should already be an instance of the session pool manager.
+    // If not, ignoring is fine while stopping HiveServer2.
     if 
(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_TEZ_INITIALIZE_DEFAULT_SESSIONS)) {
       try {
         TezSessionPoolManager.getInstance().stop();
       } catch (Exception e) {
-        LOG.error("Tez session pool manager stop had an error during stop of 
hive server");
-        e.printStackTrace();
+        LOG.error("Tez session pool manager stop had an error during stop of 
HiveServer2. "
+            + "Shutting down HiveServer2 anyway.", e);
       }
     }
   }
 
   private static void startHiveServer2() throws Throwable {
     long attempts = 0, maxAttempts = 1;
-    while(true) {
+    while (true) {
+      LOG.info("Starting HiveServer2");
       HiveConf hiveConf = new HiveConf();
       maxAttempts = 
hiveConf.getLongVar(HiveConf.ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS);
       HiveServer2 server = null;
@@ -99,6 +238,11 @@ public class HiveServer2 extends Composi
         server = new HiveServer2();
         server.init(hiveConf);
         server.start();
+        // If we're supporting dynamic service discovery, we'll add the 
service uri for this
+        // HiveServer2 instance to Zookeeper as a znode.
+        if 
(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY)) {
+          server.addServerInstanceToZooKeeper(hiveConf);
+        }
         if 
(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_TEZ_INITIALIZE_DEFAULT_SESSIONS)) {
           TezSessionPoolManager sessionPool = 
TezSessionPoolManager.getInstance();
           sessionPool.setupPool(hiveConf);
@@ -106,19 +250,19 @@ public class HiveServer2 extends Composi
         }
         break;
       } catch (Throwable throwable) {
-        if(++attempts >= maxAttempts) {
+        if (++attempts >= maxAttempts) {
           throw new Error("Max start attempts " + maxAttempts + " exhausted", 
throwable);
         } else {
-          LOG.warn("Error starting HiveServer2 on attempt " + attempts +
-            ", will retry in 60 seconds", throwable);
+          LOG.warn("Error starting HiveServer2 on attempt " + attempts
+              + ", will retry in 60 seconds", throwable);
           try {
             if (server != null) {
               server.stop();
               server = null;
             }
           } catch (Exception e) {
-            LOG.info("Exception caught when calling stop of HiveServer2 
before" +
-              " retrying start", e);
+            LOG.info(
+                "Exception caught when calling stop of HiveServer2 before" + " 
retrying start", e);
           }
           try {
             Thread.sleep(60L * 1000L);
@@ -130,32 +274,204 @@ public class HiveServer2 extends Composi
     }
   }
 
+  /**
+   * Remove all znodes corresponding to the given version number from ZooKeeper
+   *
+   * @param versionNumber
+   * @throws Exception
+   */
+  static void deleteServerInstancesFromZooKeeper(String versionNumber) throws 
Exception {
+    HiveConf hiveConf = new HiveConf();
+    int zooKeeperSessionTimeout =
+        hiveConf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
+    String zooKeeperEnsemble = ZooKeeperHiveHelper.getQuorumServers(hiveConf);
+    String rootNamespace = 
hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
+    ZooKeeper zooKeeperClient =
+        new ZooKeeper(zooKeeperEnsemble, zooKeeperSessionTimeout,
+            new ZooKeeperHiveHelper.DummyWatcher());
+    // Get all znode paths
+    List<String> znodePaths =
+        
zooKeeperClient.getChildren(ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + 
rootNamespace,
+            false);
+    // Now for each path that is for the given versionNumber, delete the znode 
from ZooKeeper
+    for (String znodePath : znodePaths) {
+      if (znodePath.contains("version=" + versionNumber + ";")) {
+        zooKeeperClient.delete(ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + 
rootNamespace
+            + ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + znodePath, -1);
+      }
+    }
+  }
+
   public static void main(String[] args) {
     HiveConf.setLoadHiveServer2Config(true);
     try {
       ServerOptionsProcessor oproc = new ServerOptionsProcessor("hiveserver2");
-      if (!oproc.process(args)) {
-        System.err.println("Error starting HiveServer2 with given arguments");
-        System.exit(-1);
-      }
+      ServerOptionsProcessorResponse oprocResponse = oproc.parse(args);
 
-      //NOTE: It is critical to do this here so that log4j is reinitialized
+      // NOTE: It is critical to do this here so that log4j is reinitialized
       // before any of the other core hive classes are loaded
       String initLog4jMessage = LogUtils.initHiveLog4j();
       LOG.debug(initLog4jMessage);
-
       HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
-      //log debug message from "oproc" after log4j initialize properly
+
+      // Log debug message from "oproc" after log4j initialize properly
       LOG.debug(oproc.getDebugMessage().toString());
-      startHiveServer2();
+
+      // Call the executor which will execute the appropriate command based on 
the parsed options
+      oprocResponse.getServerOptionsExecutor().execute();
     } catch (LogInitializationException e) {
       LOG.error("Error initializing log: " + e.getMessage(), e);
       System.exit(-1);
-    } catch (Throwable t) {
-      LOG.fatal("Error starting HiveServer2", t);
-      System.exit(-1);
     }
   }
 
+  /**
+   * ServerOptionsProcessor.
+   * Process arguments given to HiveServer2 (-hiveconf property=value)
+   * Set properties in System properties
+   * Create an appropriate response object,
+   * which has executor to execute the appropriate command based on the parsed 
options.
+   */
+  static class ServerOptionsProcessor {
+    private final Options options = new Options();
+    private org.apache.commons.cli.CommandLine commandLine;
+    private final String serverName;
+    private StringBuilder debugMessage = new StringBuilder();
+
+    @SuppressWarnings("static-access")
+    ServerOptionsProcessor(String serverName) {
+      this.serverName = serverName;
+      // -hiveconf x=y
+      options.addOption(OptionBuilder
+          .withValueSeparator()
+          .hasArgs(2)
+          .withArgName("property=value")
+          .withLongOpt("hiveconf")
+          .withDescription("Use value for given property")
+          .create());
+      // -deregister <versionNumber>
+      options.addOption(OptionBuilder
+          .hasArgs(1)
+          .withArgName("versionNumber")
+          .withLongOpt("deregister")
+          .withDescription("Deregister all instances of given version from 
dynamic service discovery")
+          .create());
+      options.addOption(new Option("H", "help", false, "Print help 
information"));
+    }
+
+    ServerOptionsProcessorResponse parse(String[] argv) {
+      try {
+        commandLine = new GnuParser().parse(options, argv);
+        // Process --hiveconf
+        // Get hiveconf param values and set the System property values
+        Properties confProps = commandLine.getOptionProperties("hiveconf");
+        for (String propKey : confProps.stringPropertyNames()) {
+          // Save the logging message for log4j output later, after log4j
initializes properly
+          debugMessage.append("Setting " + propKey + "=" + 
confProps.getProperty(propKey) + ";\n");
+          System.setProperty(propKey, confProps.getProperty(propKey));
+        }
+
+        // Process --help
+        if (commandLine.hasOption('H')) {
+          return new ServerOptionsProcessorResponse(new 
HelpOptionExecutor(serverName, options));
+        }
+
+        // Process --deregister
+        if (commandLine.hasOption("deregister")) {
+          return new ServerOptionsProcessorResponse(new 
DeregisterOptionExecutor(
+              commandLine.getOptionValue("deregister")));
+        }
+      } catch (ParseException e) {
+        // Error out & exit - we were not able to parse the args successfully
+        System.err.println("Error starting HiveServer2 with given arguments: 
");
+        System.err.println(e.getMessage());
+        System.exit(-1);
+      }
+      // Default executor, when no option is specified
+      return new ServerOptionsProcessorResponse(new StartOptionExecutor());
+    }
+
+    StringBuilder getDebugMessage() {
+      return debugMessage;
+    }
+  }
+
+  /**
+   * The response sent back from {@link ServerOptionsProcessor#parse(String[])}
+   */
+  static class ServerOptionsProcessorResponse {
+    private ServerOptionsExecutor serverOptionsExecutor;
+
+    ServerOptionsProcessorResponse(ServerOptionsExecutor 
serverOptionsExecutor) {
+      this.serverOptionsExecutor = serverOptionsExecutor;
+    }
+
+    ServerOptionsExecutor getServerOptionsExecutor() {
+      return serverOptionsExecutor;
+    }
+  }
+
+  /**
+   * The executor interface for running the appropriate HiveServer2 command 
based on parsed options
+   */
+  static interface ServerOptionsExecutor {
+    public void execute();
+  }
+
+  /**
+   * HelpOptionExecutor: executes the --help option by printing out the usage
+   */
+  static class HelpOptionExecutor implements ServerOptionsExecutor {
+    private final Options options;
+    private final String serverName;
+
+    HelpOptionExecutor(String serverName, Options options) {
+      this.options = options;
+      this.serverName = serverName;
+    }
+
+    public void execute() {
+      new HelpFormatter().printHelp(serverName, options);
+      System.exit(0);
+    }
+  }
+
+  /**
+   * StartOptionExecutor: starts HiveServer2.
+   * This is the default executor, when no option is specified.
+   */
+  static class StartOptionExecutor implements ServerOptionsExecutor {
+    public void execute() {
+      try {
+        startHiveServer2();
+      } catch (Throwable t) {
+        LOG.fatal("Error starting HiveServer2", t);
+        System.exit(-1);
+      }
+    }
+  }
+
+  /**
+   * DeregisterOptionExecutor: executes the --deregister option by
+   * deregistering all HiveServer2 instances from ZooKeeper of a specific 
version.
+   */
+  static class DeregisterOptionExecutor implements ServerOptionsExecutor {
+    private final String versionNumber;
+
+    DeregisterOptionExecutor(String versionNumber) {
+      this.versionNumber = versionNumber;
+    }
+
+    public void execute() {
+      try {
+        deleteServerInstancesFromZooKeeper(versionNumber);
+      } catch (Exception e) {
+        LOG.fatal("Error deregistering HiveServer2 instances for version: " + 
versionNumber
+            + " from ZooKeeper", e);
+        System.exit(-1);
+      }
+      System.exit(0);
+    }
+  }
 }
 

Modified: 
hive/branches/llap/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
 (original)
+++ 
hive/branches/llap/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
 Tue Oct 14 19:06:45 2014
@@ -39,7 +39,7 @@ public class TestPlainSaslHelper extends
         hconf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS));
 
 
-    CLIService cliService = new CLIService();
+    CLIService cliService = new CLIService(null);
     cliService.init(hconf);
     ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService);
     tcliService.init(hconf);

Modified: 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
 (original)
+++ 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
 Tue Oct 14 19:06:45 2014
@@ -150,7 +150,7 @@ public abstract class CLIServiceTest {
     client.closeOperation(opHandle);
 
     // Blocking execute
-    queryString = "SELECT ID FROM TEST_EXEC";
+    queryString = "SELECT ID+1 FROM TEST_EXEC";
     opHandle = client.executeStatement(sessionHandle, queryString, 
confOverlay);
     // Expect query to be completed now
     assertEquals("Query should be finished",
@@ -225,27 +225,27 @@ public abstract class CLIServiceTest {
     /**
      * Execute an async query with default config
      */
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, 
OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 0
      */
     longPollingTimeout = 0;
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, 
OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Execute an async query with long polling timeout set to 500 millis
      */
     longPollingTimeout = 500;
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     runQueryAsync(sessionHandle, queryString, confOverlay, 
OperationState.FINISHED, longPollingTimeout);
 
     /**
      * Cancellation test
      */
-    queryString = "SELECT ID FROM " + tableName;
+    queryString = "SELECT ID+1 FROM " + tableName;
     opHandle = client.executeStatementAsync(sessionHandle, queryString, 
confOverlay);
     System.out.println("Cancelling " + opHandle);
     client.cancelOperation(opHandle);

Modified: 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
 (original)
+++ 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
 Tue Oct 14 19:06:45 2014
@@ -27,7 +27,11 @@ import junit.framework.TestCase;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.cli.*;
+import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.ICLIService;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
 import org.junit.After;
@@ -40,6 +44,7 @@ public class TestSessionGlobalInitFile e
   private ThriftCLIServiceClient client;
   private File initFile;
   private String tmpDir;
+  private HiveConf hiveConf;
 
   /**
    * This class is almost the same as EmbeddedThriftBinaryCLIService,
@@ -47,7 +52,7 @@ public class TestSessionGlobalInitFile e
    */
   private class FakeEmbeddedThriftBinaryCLIService extends 
ThriftBinaryCLIService {
     public FakeEmbeddedThriftBinaryCLIService(HiveConf hiveConf) {
-      super(new CLIService());
+      super(new CLIService(null));
       isEmbedded = true;
       cliService.init(hiveConf);
       cliService.start();
@@ -82,8 +87,8 @@ public class TestSessionGlobalInitFile e
     FileUtils.writeLines(initFile, Arrays.asList(fileContent));
 
     // set up service and client
-    HiveConf hiveConf = new HiveConf();
-    hiveConf.setVar(HiveConf.ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION,
+    hiveConf = new HiveConf();
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
         initFile.getParentFile().getAbsolutePath());
     service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
     service.init(new HiveConf());
@@ -98,11 +103,26 @@ public class TestSessionGlobalInitFile e
 
   @Test
   public void testSessionGlobalInitFile() throws Exception {
-    /**
-     * create session, and fetch the property set in global init file. Test if
-     * the global init file .hiverc is loaded correctly by checking the 
expected
-     * setting property.
-     */
+    File tmpInitFile = new File(initFile.getParent(), "hiverc");
+    Assert.assertTrue("Failed to rename " + initFile + " to " + tmpInitFile,
+      initFile.renameTo(tmpInitFile));
+    initFile = tmpInitFile;
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
+        initFile.getAbsolutePath());
+    doTestSessionGlobalInitFile();
+  }
+
+  @Test
+  public void testSessionGlobalInitDir() throws Exception {
+    doTestSessionGlobalInitFile();
+  }
+
+  /**
+   * create session, and fetch the property set in global init file. Test if
+   * the global init file .hiverc is loaded correctly by checking the expected
+   * setting property.
+   */
+  private void doTestSessionGlobalInitFile() throws Exception {
     SessionHandle sessionHandle = client.openSession(null, null, null);
 
     verifyInitProperty("a", "1", sessionHandle);

Modified: 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
 (original)
+++ 
hive/branches/llap/service/src/test/org/apache/hive/service/cli/thrift/ThriftCLIServiceTest.java
 Tue Oct 14 19:06:45 2014
@@ -177,7 +177,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID FROM TEST_EXEC_THRIFT";
+    queryString = "SELECT ID+1 FROM TEST_EXEC_THRIFT";
     OperationHandle opHandle = client.executeStatement(sessHandle,
         queryString, opConf);
     assertNotNull(opHandle);
@@ -227,7 +227,7 @@ public abstract class ThriftCLIServiceTe
     client.executeStatement(sessHandle, queryString, opConf);
 
     // Execute another query
-    queryString = "SELECT ID FROM TEST_EXEC_ASYNC_THRIFT";
+    queryString = "SELECT ID+1 FROM TEST_EXEC_ASYNC_THRIFT";
     System.out.println("Will attempt to execute: " + queryString);
     opHandle = client.executeStatementAsync(sessHandle,
         queryString, opConf);

Modified: 
hive/branches/llap/service/src/test/org/apache/hive/service/server/TestServerOptionsProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/service/src/test/org/apache/hive/service/server/TestServerOptionsProcessor.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/service/src/test/org/apache/hive/service/server/TestServerOptionsProcessor.java
 (original)
+++ 
hive/branches/llap/service/src/test/org/apache/hive/service/server/TestServerOptionsProcessor.java
 Tue Oct 14 19:06:45 2014
@@ -21,6 +21,8 @@ package org.apache.hive.service.server;
 import org.junit.Assert;
 import org.junit.Test;
 
+import org.apache.hive.service.server.HiveServer2.ServerOptionsProcessor;
+
 /**
  * Test ServerOptionsProcessor
  *
@@ -39,17 +41,12 @@ public class TestServerOptionsProcessor 
         null,
         System.getProperty(key));
 
+    optProcessor.parse(args);
 
-    boolean isSuccess = optProcessor.process(args);
-    Assert.assertTrue("options processor result", isSuccess);
     Assert.assertEquals(
         "checking system property after processing options",
         value,
         System.getProperty(key));
-
-
-
-
   }
 
 }

Modified: 
hive/branches/llap/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
 (original)
+++ 
hive/branches/llap/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
 Tue Oct 14 19:06:45 2014
@@ -37,6 +37,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.LoginException;
@@ -58,6 +59,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
+import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -652,6 +654,17 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
+  public TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
+                                                             FileStatus 
status) throws IOException {
+    TreeMap<Long, BlockLocation> offsetBlockMap = new TreeMap<Long, 
BlockLocation>();
+    BlockLocation[] locations = getLocations(fs, status);
+    for (BlockLocation location : locations) {
+      offsetBlockMap.put(location.getOffset(), location);
+    }
+    return offsetBlockMap;
+  }
+
+  @Override
   public void hflush(FSDataOutputStream stream) throws IOException {
     stream.sync();
   }
@@ -906,4 +919,20 @@ public class Hadoop20Shims implements Ha
   public boolean hasStickyBit(FsPermission permission) {
     return false;   // not supported
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
+  }
+
+  @Override
+  public KerberosNameShim getKerberosNameShim(String name) throws IOException {
+    // Not supported
+    return null;
+  }
 }

Modified: 
hive/branches/llap/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
 (original)
+++ 
hive/branches/llap/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
 Tue Oct 14 19:06:45 2014
@@ -27,6 +27,7 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
@@ -59,6 +60,7 @@ import org.apache.hadoop.mapreduce.Outpu
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.security.KerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.VersionInfo;
@@ -403,6 +405,17 @@ public class Hadoop20SShims extends Hado
   }
 
   @Override
+  public TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
+                                                             FileStatus 
status) throws IOException {
+    TreeMap<Long, BlockLocation> offsetBlockMap = new TreeMap<Long, 
BlockLocation>();
+    BlockLocation[] locations = getLocations(fs, status);
+    for (BlockLocation location : locations) {
+      offsetBlockMap.put(location.getOffset(), location);
+    }
+    return offsetBlockMap;
+  }
+
+  @Override
   public void hflush(FSDataOutputStream stream) throws IOException {
     stream.sync();
   }
@@ -522,6 +535,56 @@ public class Hadoop20SShims extends Hado
 
   @Override
   public boolean hasStickyBit(FsPermission permission) {
-    return false;   // not supported
+    return false;
+  }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return false;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    return null;
+  }
+
+  /**
+   * Returns a shim to wrap KerberosName
+   */
+  @Override
+  public KerberosNameShim getKerberosNameShim(String name) throws IOException {
+    return new KerberosNameShim(name);
+  }
+
+  /**
+   * Shim for KerberosName
+   */
+  public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
+
+    private KerberosName kerberosName;
+
+    public KerberosNameShim(String name) {
+      kerberosName = new KerberosName(name);
+    }
+
+    public String getDefaultRealm() {
+      return kerberosName.getDefaultRealm();
+    }
+
+    public String getServiceName() {
+      return kerberosName.getServiceName();
+    }
+
+    public String getHostName() {
+      return kerberosName.getHostName();
+    }
+
+    public String getRealm() {
+      return kerberosName.getRealm();
+    }
+
+    public String getShortName() throws IOException {
+      return kerberosName.getShortName();
+    }
   }
 }

Modified: 
hive/branches/llap/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
 (original)
+++ 
hive/branches/llap/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
 Tue Oct 14 19:06:45 2014
@@ -29,6 +29,7 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +46,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
@@ -70,6 +72,7 @@ import org.apache.hadoop.mapreduce.TaskT
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.tez.test.MiniTezCluster;
@@ -511,6 +514,17 @@ public class Hadoop23Shims extends Hadoo
   }
 
   @Override
+  public TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
+                                                             FileStatus 
status) throws IOException {
+    TreeMap<Long, BlockLocation> offsetBlockMap = new TreeMap<Long, 
BlockLocation>();
+    BlockLocation[] locations = getLocations(fs, status);
+    for (BlockLocation location : locations) {
+      offsetBlockMap.put(location.getOffset(), location);
+    }
+    return offsetBlockMap;
+  }
+
+  @Override
   public void hflush(FSDataOutputStream stream) throws IOException {
     stream.hflush();
   }
@@ -823,4 +837,55 @@ public class Hadoop23Shims extends Hadoo
   public boolean hasStickyBit(FsPermission permission) {
     return permission.getStickyBit();
   }
+
+  @Override
+  public boolean supportTrashFeature() {
+    return true;
+  }
+
+  @Override
+  public Path getCurrentTrashPath(Configuration conf, FileSystem fs) {
+    TrashPolicy tp = TrashPolicy.getInstance(conf, fs, fs.getHomeDirectory());
+    return tp.getCurrentTrashDir();
+  }
+
+  /**
+   * Returns a shim to wrap KerberosName
+   */
+  @Override
+  public KerberosNameShim getKerberosNameShim(String name) throws IOException {
+    return new KerberosNameShim(name);
+  }
+
+  /**
+   * Shim for KerberosName
+   */
+  public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
+
+    private KerberosName kerberosName;
+
+    public KerberosNameShim(String name) {
+      kerberosName = new KerberosName(name);
+    }
+
+    public String getDefaultRealm() {
+      return kerberosName.getDefaultRealm();
+    }
+
+    public String getServiceName() {
+      return kerberosName.getServiceName();
+    }
+
+    public String getHostName() {
+      return kerberosName.getHostName();
+    }
+
+    public String getRealm() {
+      return kerberosName.getRealm();
+    }
+
+    public String getShortName() throws IOException {
+      return kerberosName.getShortName();
+    }
+  }
 }

Modified: 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
 (original)
+++ 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DBTokenStore.java
 Tue Oct 14 19:06:45 2014
@@ -108,18 +108,17 @@ public class DBTokenStore implements Del
     return delTokenIdents;
   }
 
-  private Object hmsHandler;
+  private Object rawStore;
 
   @Override
-  public void setStore(Object hms) throws TokenStoreException {
-    hmsHandler = hms;
+  public void setStore(Object rawStore) throws TokenStoreException {
+    this.rawStore = rawStore;
   }
 
   private Object invokeOnRawStore(String methName, Object[] params, Class<?> 
... paramTypes)
       throws TokenStoreException{
 
     try {
-      Object rawStore = 
hmsHandler.getClass().getMethod("getMS").invoke(hmsHandler);
       return rawStore.getClass().getMethod(methName, 
paramTypes).invoke(rawStore, params);
     } catch (IllegalArgumentException e) {
         throw new TokenStoreException(e);

Modified: 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
 (original)
+++ 
hive/branches/llap/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
 Tue Oct 14 19:06:45 2014
@@ -417,7 +417,7 @@ public class HadoopThriftAuthBridge20S e
     }
 
     @Override
-    public void startDelegationTokenSecretManager(Configuration conf, Object 
hms)
+    public void startDelegationTokenSecretManager(Configuration conf, Object 
rawStore)
         throws IOException{
       long secretKeyInterval =
           conf.getLong(DELEGATION_KEY_UPDATE_INTERVAL_KEY,
@@ -430,7 +430,7 @@ public class HadoopThriftAuthBridge20S e
               DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
 
       DelegationTokenStore dts = getTokenStore(conf);
-      dts.setStore(hms);
+      dts.setStore(rawStore);
       secretManager = new 
TokenStoreDelegationTokenSecretManager(secretKeyInterval,
           tokenMaxLifetime,
           tokenRenewInterval,

Modified: 
hive/branches/llap/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- 
hive/branches/llap/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
 (original)
+++ 
hive/branches/llap/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
 Tue Oct 14 19:06:45 2014
@@ -30,6 +30,7 @@ import java.security.PrivilegedException
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
 import javax.security.auth.login.LoginException;
 
@@ -477,6 +478,19 @@ public interface HadoopShims {
       FileStatus status) throws IOException;
 
   /**
+   * For the block locations returned by getLocations() convert them into a 
Treemap
+   * <Offset,blockLocation> by iterating over the list of blockLocation.
+   * Using TreeMap from offset to blockLocation, makes it O(logn) to get a 
particular
+   * block based upon offset.
+   * @param fs the file system
+   * @param status the file information
+   * @return TreeMap<Long, BlockLocation>
+   * @throws IOException
+   */
+  TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
+      FileStatus status) throws IOException;
+
+  /**
    * Flush and make visible to other users the changes to the given stream.
    * @param stream the stream to hflush.
    * @throws IOException
@@ -707,4 +721,31 @@ public interface HadoopShims {
    * @return sticky bit
    */
   boolean hasStickyBit(FsPermission permission);
+
+  /**
+   * @return True if the current hadoop supports trash feature.
+   */
+  boolean supportTrashFeature();
+
+  /**
+   * @return Path to HDFS trash, if current hadoop supports trash feature.  
Null otherwise.
+   */
+  Path getCurrentTrashPath(Configuration conf, FileSystem fs);
+
+  /**
+   * Returns a shim to wrap KerberosName
+   */
+  public KerberosNameShim getKerberosNameShim(String name) throws IOException;
+
+  /**
+   * Shim for KerberosName
+   */
+  public interface KerberosNameShim {
+    public String getDefaultRealm();
+    public String getServiceName();
+    public String getHostName();
+    public String getRealm();
+    public String getShortName() throws IOException;
+  }
+
 }


Reply via email to