Repository: hive
Updated Branches:
  refs/heads/llap 23ede5dd1 -> bc8de94ae (forced update)


HIVE-12967: Change LlapServiceDriver to read a properties file instead of llap-daemon-site (Siddarth Seth, via Gopal V)

Signed-off-by: Gopal V <gop...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f8f50ab1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f8f50ab1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f8f50ab1

Branch: refs/heads/llap
Commit: f8f50ab17da3155489e96cdf2e9b849aa88b8700
Parents: 1b99efe
Author: Gopal V <gop...@apache.org>
Authored: Tue Feb 16 16:10:34 2016 -0800
Committer: Gopal V <gop...@apache.org>
Committed: Tue Feb 16 16:26:04 2016 -0800

----------------------------------------------------------------------
 bin/ext/llap.sh                                 |   1 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  78 +++++++++-
 .../llap/configuration/LlapConfiguration.java   |  41 -----
 .../registry/impl/LlapFixedRegistryImpl.java    |   6 +-
 .../registry/impl/LlapYarnRegistryImpl.java     |   5 +-
 .../hive/llap/cli/LlapOptionsProcessor.java     |  96 ++++++++----
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 155 ++++++++++++-------
 .../configuration/LlapDaemonConfiguration.java  |  39 +++++
 .../hive/llap/daemon/impl/AMReporter.java       |   1 -
 .../hive/llap/daemon/impl/LlapDaemon.java       |   4 +-
 .../hive/llap/daemon/impl/QueryTracker.java     |   2 -
 .../llap/shufflehandler/ShuffleHandler.java     |  21 ++-
 .../main/resources/llap-cli-log4j2.properties   |  77 +++++++++
 .../hive/llap/daemon/MiniLlapCluster.java       |   1 -
 .../impl/TestLlapDaemonProtocolServerImpl.java  |   4 +-
 15 files changed, 376 insertions(+), 155 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/bin/ext/llap.sh
----------------------------------------------------------------------
diff --git a/bin/ext/llap.sh b/bin/ext/llap.sh
index fa87587..838bb3a 100644
--- a/bin/ext/llap.sh
+++ b/bin/ext/llap.sh
@@ -30,6 +30,7 @@ llap () {
 
   set -e;
 
+  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=llap-cli-log4j2.properties "
   # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
   $HADOOP $CLASS $HIVE_OPTS -directory $TMPDIR "$@"
   

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 3ab1dba..48e8491 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.conf;
 import com.google.common.base.Joiner;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.conf.Validator.PatternSet;
 import org.apache.hadoop.hive.conf.Validator.RangeValidator;
@@ -46,9 +47,11 @@ import java.io.InputStream;
 import java.io.PrintStream;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -109,8 +112,17 @@ public class HiveConf extends Configuration {
     for (ConfVars confVar : ConfVars.values()) {
       vars.put(confVar.varname, confVar);
     }
+
+    Set<String> llapDaemonConfVarsSetLocal = new LinkedHashSet<>();
+    populateLlapDaemonVarsSet(llapDaemonConfVarsSetLocal);
+    llapDaemonVarsSet = Collections.unmodifiableSet(llapDaemonConfVarsSetLocal);
   }
 
+  @InterfaceAudience.Private
+  public static final String PREFIX_LLAP = "llap.";
+  @InterfaceAudience.Private
+  public static final String PREFIX_HIVE_LLAP = "hive.llap.";
+
   /**
    * Metastore related options that the db is initialized against. When a conf
   * var in this list is changed, the metastore instance for the CLI will
@@ -237,6 +249,70 @@ public class HiveConf extends Configuration {
   };
 
   /**
+   * Variables used by LLAP daemons.
+   * TODO: Eventually auto-populate this based on prefixes. The conf variables
+   * will need to be renamed for this.
+   */
+  private static final Set<String> llapDaemonVarsSet;
+
+  private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal) {
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_IO_ENABLED.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_IO_MEMORY_MODE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ALLOCATOR_MIN_ALLOC.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ALLOCATOR_MAX_ALLOC.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ALLOCATOR_ARENA_COUNT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ALLOCATOR_DIRECT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_USE_LRFU.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_LRFU_LAMBDA.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_CACHE_ALLOW_SYNTHETIC_FILEID.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_IO_USE_FILEID_PATH.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ORC_ENABLE_TIME_COUNTERS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_IO_THREADPOOL_SIZE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_KERBEROS_PRINCIPAL.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_KERBEROS_KEYTAB_FILE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ZKSM_KERBEROS_PRINCIPAL.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ZKSM_KERBEROS_KEYTAB_FILE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_ZKSM_ZK_CONNECTION_STRING.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_SECURITY_ACL.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_MANAGEMENT_ACL.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DELEGATION_TOKEN_LIFETIME.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_MANAGEMENT_RPC_PORT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_WEB_AUTO_AUTH.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_RPC_NUM_HANDLERS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_WORK_DIRS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_SHUFFLE_DIR_WATCHER_ENABLED.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_AM_LIVENESS_HEARTBEAT_INTERVAL_MS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_AM_LIVENESS_CONNECTION_TIMEOUT_MS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_AM_LIVENESS_CONNECTION_SLEEP_BETWEEN_RETRIES_MS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_RPC_PORT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_VCPUS_PER_INSTANCE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_NUM_FILE_CLEANER_THREADS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_FILE_CLEANUP_DELAY_SECONDS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_SERVICE_REFRESH_INTERVAL.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_ALLOW_PERMANENT_FNS.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_WAIT_QUEUE_COMPARATOR_CLASS_NAME.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_TASK_SCHEDULER_ENABLE_PREEMPTION.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_WEB_PORT.varname);
+    llapDaemonVarsSetLocal.add(ConfVars.LLAP_DAEMON_WEB_SSL.varname);
+  }
+
+  /**
+   * Get a set containing configuration parameter names used by LLAP Server instances.
+   * @return an unmodifiable set containing llap ConfVars
+   */
+  public static final Set<String> getLlapDaemonConfVars() {
+    return llapDaemonVarsSet;
+  }
+
+
+  /**
    * ConfVars.
    *
    * These are the default configuration properties for Hive. Each HiveConf
@@ -2492,7 +2568,7 @@ public class HiveConf extends Configuration {
     LLAP_DAEMON_YARN_SHUFFLE_PORT("hive.llap.daemon.yarn.shuffle.port", 15551,
       "YARN shuffle port for LLAP-daemon-hosted shuffle.", 
"llap.daemon.yarn.shuffle.port"),
     LLAP_DAEMON_YARN_CONTAINER_MB("hive.llap.daemon.yarn.container.mb", -1,
-      "TODO doc. Unused?", "llap.daemon.yarn.container.mb"),
+      "llap server yarn container size in MB. Used in LlapServiceDriver and 
package.py", "llap.daemon.yarn.container.mb"),
     
LLAP_DAEMON_SHUFFLE_DIR_WATCHER_ENABLED("hive.llap.daemon.shuffle.dir.watcher.enabled",
 false,
       "TODO doc", "llap.daemon.shuffle.dir-watcher.enabled"),
     LLAP_DAEMON_AM_LIVENESS_HEARTBEAT_INTERVAL_MS(
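For context, a minimal sketch (hypothetical, not part of this patch) of how a client could use the new HiveConf.getLlapDaemonConfVars() set together with the PREFIX_LLAP / PREFIX_HIVE_LLAP constants to carve the daemon-only keys out of a full configuration. Only the HiveConf members are taken from the change above; the helper class itself is made up:

    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    // Hypothetical helper: copy only the keys LLAP daemons actually read.
    public class LlapDaemonConfFilter {
      public static Configuration filterDaemonKeys(Configuration full) {
        Configuration daemonOnly = new Configuration(false);
        for (Map.Entry<String, String> kv : full) {
          String key = kv.getKey();
          if (HiveConf.getLlapDaemonConfVars().contains(key)
              || key.startsWith(HiveConf.PREFIX_LLAP)
              || key.startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
            daemonOnly.set(key, kv.getValue());
          }
        }
        return daemonOnly;
      }
    }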

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-client/src/java/org/apache/hadoop/hive/llap/configuration/LlapConfiguration.java
----------------------------------------------------------------------
diff --git 
a/llap-client/src/java/org/apache/hadoop/hive/llap/configuration/LlapConfiguration.java
 
b/llap-client/src/java/org/apache/hadoop/hive/llap/configuration/LlapConfiguration.java
deleted file mode 100644
index abdbc09..0000000
--- 
a/llap-client/src/java/org/apache/hadoop/hive/llap/configuration/LlapConfiguration.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.llap.configuration;
-
-import java.net.URL;
-
-import org.apache.hadoop.conf.Configuration;
-
-public class LlapConfiguration extends Configuration {
-  public static final String LLAP_PREFIX = "llap.";
-  public static final String LLAP_DAEMON_PREFIX = "llap.daemon.";
-
-  public LlapConfiguration(Configuration conf) {
-    super(conf);
-    addResource(LLAP_DAEMON_SITE);
-  }
-
-  public LlapConfiguration() {
-    super(false);
-    addResource(LLAP_DAEMON_SITE);
-  }
-
-  public LlapConfiguration(Configuration conf, URL llapDaemonConfLocation) {
-    super(conf);
-    addResource(llapDaemonConfLocation);
-  }
-
-  private static final String LLAP_DAEMON_SITE = "llap-daemon-site.xml";
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapFixedRegistryImpl.java
----------------------------------------------------------------------
diff --git 
a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapFixedRegistryImpl.java
 
b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapFixedRegistryImpl.java
index 92044bb..c3c16c4 100644
--- 
a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapFixedRegistryImpl.java
+++ 
b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapFixedRegistryImpl.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import org.apache.hadoop.hive.llap.registry.ServiceInstance;
 import org.apache.hadoop.hive.llap.registry.ServiceInstanceSet;
 import org.apache.hadoop.hive.llap.registry.ServiceRegistry;
@@ -67,9 +66,8 @@ public class LlapFixedRegistryImpl implements ServiceRegistry 
{
     this.mngPort = HiveConf.getIntVar(conf, ConfVars.LLAP_MANAGEMENT_RPC_PORT);
 
     for (Map.Entry<String, String> kv : conf) {
-      if (kv.getKey().startsWith(LlapConfiguration.LLAP_DAEMON_PREFIX)
-          || kv.getKey().startsWith("hive.llap.")
-          || kv.getKey().startsWith(LlapConfiguration.LLAP_PREFIX)) {
+      if (kv.getKey().startsWith(HiveConf.PREFIX_LLAP)
+          || kv.getKey().startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
         // TODO: read this somewhere useful, like the task scheduler
         srv.put(kv.getKey(), kv.getValue());
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapYarnRegistryImpl.java
----------------------------------------------------------------------
diff --git 
a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapYarnRegistryImpl.java
 
b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapYarnRegistryImpl.java
index efe31cc..c83dd6e 100644
--- 
a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapYarnRegistryImpl.java
+++ 
b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapYarnRegistryImpl.java
@@ -39,7 +39,6 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import org.apache.hadoop.hive.llap.registry.ServiceInstance;
 import org.apache.hadoop.hive.llap.registry.ServiceInstanceSet;
 import org.apache.hadoop.hive.llap.registry.ServiceRegistry;
@@ -161,8 +160,8 @@ public class LlapYarnRegistryImpl implements 
ServiceRegistry {
     srv.addExternalEndpoint(getServicesEndpoint());
 
     for (Map.Entry<String, String> kv : this.conf) {
-      if (kv.getKey().startsWith(LlapConfiguration.LLAP_DAEMON_PREFIX)
-          || kv.getKey().startsWith("hive.llap.")) {
+      if (kv.getKey().startsWith(HiveConf.PREFIX_LLAP)
+          || kv.getKey().startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
         // TODO: read this somewhere useful, like the task scheduler
         srv.set(kv.getKey(), kv.getValue());
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
index 6d25384..1732df8 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.hive.llap.cli;
 
-import java.util.HashMap;
-import java.util.Map;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.Properties;
 
 import javax.annotation.Nonnull;
@@ -36,6 +37,21 @@ import org.apache.hadoop.util.StringUtils;
 
 public class LlapOptionsProcessor {
 
+  public static final String OPTION_INSTANCES = "instances"; //forward as arg
+  public static final String OPTION_NAME = "name"; // forward as arg
+  public static final String OPTION_DIRECTORY = "directory"; // work-dir
+  public static final String OPTION_ARGS = "args"; // forward as arg
+  public static final String OPTION_LOGLEVEL = "loglevel"; // forward as arg
+  public static final String OPTION_CHAOS_MONKEY = "chaosmonkey"; // forward 
as arg
+  public static final String OPTION_EXECUTORS = "executors"; // 
llap-daemon-site
+  public static final String OPTION_CACHE = "cache"; // llap-daemon-site
+  public static final String OPTION_SIZE = "size"; // forward via config.json
+  public static final String OPTION_XMX = "xmx"; // forward as arg
+  public static final String OPTION_AUXJARS = "auxjars"; // used to localize 
jars
+  public static final String OPTION_AUXHBASE = "auxhbase"; // used to localize 
jars
+  public static final String OPTION_JAVA_HOME = "javaHome"; // forward via 
config.json
+  public static final String OPTION_HIVECONF = "hiveconf"; // llap-daemon-site 
if relevant parameter
+
   public class LlapOptions {
     private final int instances;
     private final String directory;
@@ -47,10 +63,12 @@ public class LlapOptionsProcessor {
     private final String jars;
     private final boolean isHbase;
     private final Properties conf;
+    private final String javaPath;
 
     public LlapOptions(String name, int instances, String directory, int 
executors, long cache,
-        long size, long xmx, String jars, boolean isHbase, @Nonnull Properties 
hiveconf)
-            throws ParseException {
+                       long size, long xmx, String jars, boolean isHbase,
+                       @Nonnull Properties hiveconf, String javaPath)
+        throws ParseException {
       if (instances <= 0) {
         throw new ParseException("Invalid configuration: " + instances
             + " (should be greater than 0)");
@@ -65,6 +83,7 @@ public class LlapOptionsProcessor {
       this.jars = jars;
       this.isHbase = isHbase;
       this.conf = hiveconf;
+      this.javaPath = javaPath;
     }
 
     public String getName() {
@@ -106,57 +125,67 @@ public class LlapOptionsProcessor {
     public Properties getConfig() {
       return conf;
     }
+
+    public String getJavaPath() {
+      return javaPath;
+    }
   }
 
   protected static final Logger l4j = 
LoggerFactory.getLogger(LlapOptionsProcessor.class.getName());
   private final Options options = new Options();
-  Map<String, String> hiveVariables = new HashMap<String, String>();
   private org.apache.commons.cli.CommandLine commandLine;
 
   @SuppressWarnings("static-access")
   public LlapOptionsProcessor() {
 
     // set the number of instances on which llap should run
-    
options.addOption(OptionBuilder.hasArg().withArgName("instances").withLongOpt("instances")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_INSTANCES).withLongOpt(OPTION_INSTANCES)
         .withDescription("Specify the number of instances to run this 
on").create('i'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("name").withLongOpt("name")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_NAME).withLongOpt(OPTION_NAME)
         .withDescription("Cluster name for YARN registry").create('n'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("directory").withLongOpt("directory")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_DIRECTORY).withLongOpt(OPTION_DIRECTORY)
         .withDescription("Temp directory for jars etc.").create('d'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("args").withLongOpt("args")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_ARGS).withLongOpt(OPTION_ARGS)
         .withDescription("java arguments to the llap instance").create('a'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("loglevel").withLongOpt("loglevel")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_LOGLEVEL).withLongOpt(OPTION_LOGLEVEL)
         .withDescription("log levels for the llap instance").create('l'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("chaosmonkey").withLongOpt("chaosmonkey")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_CHAOS_MONKEY).withLongOpt(OPTION_CHAOS_MONKEY)
         .withDescription("chaosmonkey interval").create('m'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("executors").withLongOpt("executors")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_EXECUTORS).withLongOpt(OPTION_EXECUTORS)
         .withDescription("executor per instance").create('e'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("cache").withLongOpt("cache")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_CACHE).withLongOpt(OPTION_CACHE)
         .withDescription("cache size per instance").create('c'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("size").withLongOpt("size")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_SIZE).withLongOpt(OPTION_SIZE)
         .withDescription("container size per instance").create('s'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("xmx").withLongOpt("xmx")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_XMX).withLongOpt(OPTION_XMX)
         .withDescription("working memory size").create('w'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_AUXJARS).withLongOpt(OPTION_AUXJARS)
         .withDescription("additional jars to package (by default, JSON SerDe 
jar is packaged"
             + " if available)").create('j'));
 
-    
options.addOption(OptionBuilder.hasArg().withArgName("auxhbase").withLongOpt("auxhbase")
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_AUXHBASE).withLongOpt(OPTION_AUXHBASE)
         .withDescription("whether to package the HBase jars (true by 
default)").create('h'));
 
+    
options.addOption(OptionBuilder.hasArg().withArgName(OPTION_JAVA_HOME).withLongOpt(OPTION_JAVA_HOME)
+        .withDescription(
+            "Path to the JRE/JDK. This should be installed at the same 
location on all cluster nodes ($JAVA_HOME, java.home by default)")
+        .create());
+
     // -hiveconf x=y
     
options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
-        .withLongOpt("hiveconf").withDescription("Use value for given 
property").create());
+        .withLongOpt(OPTION_HIVECONF)
+        .withDescription("Use value for given property. Overridden by explicit 
parameters")
+        .create());
 
     // [-H|--help]
     options.addOption(new Option("H", "help", false, "Print help 
information"));
@@ -166,7 +195,7 @@ public class LlapOptionsProcessor {
     return StringUtils.TraditionalBinaryPrefix.string2long(value);
   }
 
-  public LlapOptions processOptions(String argv[]) throws ParseException {
+  public LlapOptions processOptions(String argv[]) throws ParseException, 
IOException {
     commandLine = new GnuParser().parse(options, argv);
     if (commandLine.hasOption('H') || false == 
commandLine.hasOption("instances")) {
       // needs at least --instances
@@ -174,31 +203,34 @@ public class LlapOptionsProcessor {
       return null;
     }
 
-    int instances = Integer.parseInt(commandLine.getOptionValue("instances"));
-    String directory = commandLine.getOptionValue("directory");
-    String jars = commandLine.getOptionValue("auxjars");
+    int instances = 
Integer.parseInt(commandLine.getOptionValue(OPTION_INSTANCES));
+    String directory = commandLine.getOptionValue(OPTION_DIRECTORY);
+    String jars = commandLine.getOptionValue(OPTION_AUXJARS);
 
-    String name = commandLine.getOptionValue("name", null);
+    String name = commandLine.getOptionValue(OPTION_NAME, null);
 
-    final int executors = 
Integer.parseInt(commandLine.getOptionValue("executors", "-1"));
-    final long cache = parseSuffixed(commandLine.getOptionValue("cache", 
"-1"));
-    final long size = parseSuffixed(commandLine.getOptionValue("size", "-1"));
-    final long xmx = parseSuffixed(commandLine.getOptionValue("xmx", "-1"));
-    final boolean isHbase = 
Boolean.parseBoolean(commandLine.getOptionValue("auxhbase", "true"));
+    final int executors = 
Integer.parseInt(commandLine.getOptionValue(OPTION_EXECUTORS, "-1"));
+    final long cache = parseSuffixed(commandLine.getOptionValue(OPTION_CACHE, 
"-1"));
+    final long size = parseSuffixed(commandLine.getOptionValue(OPTION_SIZE, 
"-1"));
+    final long xmx = parseSuffixed(commandLine.getOptionValue(OPTION_XMX, 
"-1"));
+    final boolean isHbase = 
Boolean.parseBoolean(commandLine.getOptionValue(OPTION_AUXHBASE, "true"));
 
     final Properties hiveconf;
 
-    if (commandLine.hasOption("hiveconf")) {
-      hiveconf = commandLine.getOptionProperties("hiveconf");
+    if (commandLine.hasOption(OPTION_HIVECONF)) {
+      hiveconf = commandLine.getOptionProperties(OPTION_HIVECONF);
     } else {
       hiveconf = new Properties();
     }
 
+    String javaHome = null;
+    if (commandLine.hasOption(OPTION_JAVA_HOME)) {
+      javaHome = commandLine.getOptionValue(OPTION_JAVA_HOME);
+    }
     // loglevel, chaosmonkey & args are parsed by the python processor
 
     return new LlapOptions(
-        name, instances, directory, executors, cache, size, xmx, jars, 
isHbase, hiveconf);
-
+        name, instances, directory, executors, cache, size, xmx, jars, 
isHbase, hiveconf, javaHome);
   }
 
   private void printUsage() {
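A hypothetical programmatic usage sketch, mainly to show the new --javaHome option and the getJavaPath()/getConfig() accessors; the flag values below are made up:

    // Hypothetical snippet; processOptions() now declares IOException in
    // addition to ParseException, hence the throws clause.
    import org.apache.hadoop.hive.llap.cli.LlapOptionsProcessor;

    public class OptionsExample {
      public static void main(String[] args) throws Exception {
        LlapOptionsProcessor processor = new LlapOptionsProcessor();
        LlapOptionsProcessor.LlapOptions opts = processor.processOptions(new String[] {
            "--instances", "2",
            "--directory", "/tmp/llap-stage",
            "--javaHome", "/usr/lib/jvm/default-java",
            "--hiveconf", "hive.llap.daemon.service.hosts=@llap0"});
        if (opts != null) { // null means -H/--help or a missing --instances
          System.out.println("javaHome=" + opts.getJavaPath());
          System.out.println("overrides=" + opts.getConfig());
        }
      }
    }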

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java 
b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index d01c8ce..32c791f 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.llap.cli;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.net.URL;
-import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Collection;
+import java.util.Properties;
 
+import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos;
 import org.apache.hadoop.hive.llap.tezplugins.LlapTezUtils;
+import org.apache.tez.dag.api.TezConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -52,14 +54,18 @@ import com.google.common.base.Preconditions;
 public class LlapServiceDriver {
 
   protected static final Logger LOG = 
LoggerFactory.getLogger(LlapServiceDriver.class.getName());
+
   private static final String[] DEFAULT_AUX_CLASSES = new String[] {
   "org.apache.hive.hcatalog.data.JsonSerDe" };
   private static final String HBASE_SERDE_CLASS = 
"org.apache.hadoop.hive.hbase.HBaseSerDe";
-  private static final String[] NEEDED_CONFIGS = {
-    "tez-site.xml", "hive-site.xml", "llap-daemon-site.xml", "core-site.xml" };
+  private static final String[] NEEDED_CONFIGS = 
LlapDaemonConfiguration.DAEMON_CONFIGS;
   private static final String[] OPTIONAL_CONFIGS = { "ssl-server.xml" };
 
 
+  /**
+   * This is a working configuration for the instance to merge various variables.
+   * It is not written out for llap server usage.
+   */
   private final Configuration conf;
 
   public LlapServiceDriver() {
@@ -82,40 +88,56 @@ public class LlapServiceDriver {
     System.exit(ret);
   }
 
-  /**
-   * Intersect llap-daemon-site.xml configuration properties against an 
existing Configuration
-   * object, while resolving any ${} parameters that might be present.
-   * 
-   * @param raw
-   * @return configuration object which is a slice of configured
-   */
-  public static Configuration resolve(Configuration configured, String first, 
String... resources) {
-    Configuration defaults = new Configuration(false);
 
-    defaults.addResource(first);
+  private static Configuration resolve(Configuration configured, Properties 
direct,
+                                       Properties hiveconf) {
+    Configuration conf = new Configuration(false);
 
-    for (String resource : resources) {
-      defaults.addResource(resource);
-    }
+    populateConf(configured, conf, hiveconf, "CLI hiveconf");
+    populateConf(configured, conf, direct, "CLI direct");
+
+    return conf;
+  }
 
-    Configuration slice = new Configuration(false);
-    // for everything in defaults, slice out those from the configured
-    for (Map.Entry<String, String> kv : defaults) {
-      slice.set(kv.getKey(), configured.get(kv.getKey()));
+  private static void populateConf(Configuration configured, Configuration 
target,
+                                   Properties properties, String source) {
+    for (Entry<Object, Object> entry : properties.entrySet()) {
+      String key = (String) entry.getKey();
+      String val = configured.get(key);
+      if (val != null) {
+        target.set(key, val, source);
+      }
     }
+  }
 
-    return slice;
+  private static void populateConfWithLlapProperties(Configuration conf, Properties properties) {
+    for(Entry<Object, Object> props : properties.entrySet()) {
+      String key = (String) props.getKey();
+      if (HiveConf.getLlapDaemonConfVars().contains(key)) {
+        conf.set(key, (String) props.getValue());
+      } else {
+        if (key.startsWith(HiveConf.PREFIX_LLAP) || key.startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
+          LOG.warn("Adding key [{}] even though it is not in the set of known llap-server keys", key);
+          conf.set(key, (String) props.getValue());
+        } else {
+          LOG.warn("Ignoring unknown llap server parameter: [{}]", key);
+        }
+      }
+    }
   }
 
   private void run(String[] args) throws Exception {
     LlapOptionsProcessor optionsProcessor = new LlapOptionsProcessor();
     LlapOptions options = optionsProcessor.processOptions(args);
 
+    Properties propsDirectOptions = new Properties();
+
     if (options == null) {
       // help
       return;
     }
 
+    // Working directory.
     Path tmpDir = new Path(options.getDirectory());
 
     if (conf == null) {
@@ -135,14 +157,19 @@ public class LlapServiceDriver {
     for (String f : OPTIONAL_CONFIGS) {
       conf.addResource(f);
     }
+
     conf.reloadConfiguration();
 
+    populateConfWithLlapProperties(conf, options.getConfig());
+
+
     if (options.getName() != null) {
       // update service registry configs - caveat: this has nothing to do with 
the actual settings
       // as read by the AM
       // if needed, use --hiveconf llap.daemon.service.hosts=@llap0 to 
dynamically switch between
       // instances
       conf.set(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname, "@" + 
options.getName());
+      
propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname, "@" 
+ options.getName());
     }
 
     if (options.getSize() != -1) {
@@ -160,35 +187,40 @@ public class LlapServiceDriver {
       }
     }
 
+    // This parameter is read in package.py - and nowhere else. Does not need 
to be part of HiveConf - that's just confusing.
     final long minAlloc = 
conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, -1);
+    long containerSize = -1;
     if (options.getSize() != -1) {
-      final long containerSize = options.getSize() / (1024 * 1024);
+      containerSize = options.getSize() / (1024 * 1024);
       Preconditions.checkArgument(containerSize >= minAlloc,
           "Container size should be greater than minimum allocation(%s)", 
minAlloc + "m");
       conf.setLong(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, 
containerSize);
+      
propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, 
String.valueOf(containerSize));
     }
 
     if (options.getExecutors() != -1) {
       conf.setLong(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname, 
options.getExecutors());
+      
propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname, 
String.valueOf(options.getExecutors()));
       // TODO: vcpu settings - possibly when DRFA works right
     }
 
     if (options.getCache() != -1) {
       conf.set(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname,
           Long.toString(options.getCache()));
+      
propsDirectOptions.setProperty(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname,
+          Long.toString(options.getCache()));
     }
 
     if (options.getXmx() != -1) {
       // Needs more explanation here
       // Xmx is not the max heap value in JDK8
-      // You need to subtract 50% of the survivor fraction from this, to get 
actual usable memory before it goes into GC 
-      conf.setLong(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname, 
(long)(options.getXmx())
-          / (1024 * 1024));
+      // You need to subtract 50% of the survivor fraction from this, to get 
actual usable memory before it goes into GC
+      long xmx = (long) (options.getXmx() / (1024 * 1024));
+      conf.setLong(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname, xmx);
+      
propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname,
 String.valueOf(xmx));
     }
 
-    for (Entry<Object, Object> props : options.getConfig().entrySet()) {
-      conf.set((String) props.getKey(), (String) props.getValue());
-    }
+
 
     URL logger = conf.getResource("llap-daemon-log4j2.properties");
 
@@ -205,9 +237,10 @@ public class LlapServiceDriver {
       LOG.warn("Unable to find llap scripts:" + scripts);
     }
 
+
     Path libDir = new Path(tmpDir, "lib");
 
-    String tezLibs = conf.get("tez.lib.uris");
+    String tezLibs = conf.get(TezConfiguration.TEZ_LIB_URIS);
     if (tezLibs == null) {
       LOG.warn("Missing tez.lib.uris in tez-site.xml");
     }
@@ -264,31 +297,43 @@ public class LlapServiceDriver {
       }
     }
 
+    String java_home;
+    if (options.getJavaPath() == null || options.getJavaPath().isEmpty()) {
+      java_home = System.getenv("JAVA_HOME");
+      String jre_home = System.getProperty("java.home");
+      if (java_home == null) {
+        java_home = jre_home;
+      } else if (!java_home.equals(jre_home)) {
+        LOG.warn("Java versions might not match : JAVA_HOME=[{}],process 
jre=[{}]",
+            java_home, jre_home);
+      }
+    } else {
+      java_home = options.getJavaPath();
+    }
+    if (java_home == null || java_home.isEmpty()) {
+      throw new RuntimeException(
+          "Could not determine JAVA_HOME from command line parameters, 
environment or system properties");
+    }
+    LOG.info("Using [{}] for JAVA_HOME", java_home);
+
     Path confPath = new Path(tmpDir, "conf");
     lfs.mkdirs(confPath);
 
+    // Copy over the mandatory configs for the package.
     for (String f : NEEDED_CONFIGS) {
-      copyConfig(options, lfs, confPath, f);
+      copyConfig(lfs, confPath, f);
     }
     for (String f : OPTIONAL_CONFIGS) {
       try {
-        copyConfig(options, lfs, confPath, f);
+        copyConfig(lfs, confPath, f);
       } catch (Throwable t) {
         LOG.info("Error getting an optional config " + f + "; ignoring: " + 
t.getMessage());
       }
     }
+    createLlapDaemonConfig(lfs, confPath, conf, propsDirectOptions, 
options.getConfig());
 
     lfs.copyFromLocalFile(new Path(logger.toString()), confPath);
 
-    String java_home = System.getenv("JAVA_HOME");
-    String jre_home = System.getProperty("java.home");
-    if (java_home == null) {
-      java_home = jre_home;
-    } else if (!java_home.equals(jre_home)) {
-      LOG.warn("Java versions might not match : JAVA_HOME=%s,process jre=%s", 
-          java_home, jre_home);
-    }
-
     // extract configs for processing by the python fragments in Slider
     JSONObject configs = new JSONObject();
 
@@ -296,6 +341,7 @@ public class LlapServiceDriver {
 
     configs.put(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, 
HiveConf.getIntVar(conf,
         ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB));
+    configs.put(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, containerSize);
 
     configs.put(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname,
         HiveConf.getSizeVar(conf, HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE));
@@ -362,23 +408,22 @@ public class LlapServiceDriver {
     }
   }
 
-  private void copyConfig(
-      LlapOptions options, FileSystem lfs, Path confPath, String f) throws 
IOException {
-    if (f.equals("llap-daemon-site.xml")) {
-      FSDataOutputStream confStream = lfs.create(new Path(confPath, f));
+  private void createLlapDaemonConfig(FileSystem lfs, Path confPath, 
Configuration configured,
+                                      Properties direct, Properties hiveconf) 
throws IOException {
+    FSDataOutputStream confStream =
+        lfs.create(new Path(confPath, 
LlapDaemonConfiguration.LLAP_DAEMON_SITE));
 
-      Configuration copy = resolve(conf, "llap-daemon-site.xml");
+    Configuration llapDaemonConf = resolve(configured, direct, hiveconf);
 
-      for (Entry<Object, Object> props : options.getConfig().entrySet()) {
-        // overrides
-        copy.set((String) props.getKey(), (String) props.getValue());
-      }
+    llapDaemonConf.writeXml(confStream);
+    confStream.close();
+  }
 
-      copy.writeXml(confStream);
-      confStream.close();
-    } else {
-      // they will be file:// URLs
-      lfs.copyFromLocalFile(new Path(conf.getResource(f).toString()), 
confPath);
-    }
+  private void copyConfig(FileSystem lfs, Path confPath, String f) throws 
IOException {
+    HiveConf.getBoolVar(new Configuration(false), 
ConfVars.LLAP_CLIENT_CONSISTENT_SPLITS);
+    // they will be file:// URLs
+    lfs.copyFromLocalFile(new Path(conf.getResource(f).toString()), confPath);
   }
+
+
 }
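The net effect of the driver changes above: --hiveconf values are folded into the working configuration first, explicit flags such as --executors are applied afterwards (so they take precedence for the same key), and the merged result is written out as llap-daemon-site.xml into the package's conf directory by createLlapDaemonConfig() instead of copying the client's file. A hypothetical invocation, with all values illustrative:

    // Hypothetical invocation; in practice this entry point is reached via the
    // llap service wrapper in bin/ext/llap.sh shown earlier.
    public class LlapPackageExample {
      public static void main(String[] args) throws Exception {
        org.apache.hadoop.hive.llap.cli.LlapServiceDriver.main(new String[] {
            "--instances", "2",
            "--directory", "/tmp/llap-staging",
            "--executors", "8",
            "--hiveconf", "hive.llap.daemon.service.hosts=@llap0"});
      }
    }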

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/configuration/LlapDaemonConfiguration.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/configuration/LlapDaemonConfiguration.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/configuration/LlapDaemonConfiguration.java
new file mode 100644
index 0000000..72340f5
--- /dev/null
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/configuration/LlapDaemonConfiguration.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.llap.configuration;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+
+/**
+ * Configuration for LLAP daemon processes only. This should not be used by 
any clients.
+ */
+public class LlapDaemonConfiguration extends Configuration {
+
+  @InterfaceAudience.Private
+  public static final String LLAP_DAEMON_SITE = "llap-daemon-site.xml";
+
+  @InterfaceAudience.Private
+  public static final String[] DAEMON_CONFIGS = { /* in specific order 
*/"core-site.xml",
+      "hdfs-site.xml", "yarn-site.xml", "tez-site.xml", "hive-site.xml" };
+  
+  public LlapDaemonConfiguration() {
+    super(false);
+    for (String conf : DAEMON_CONFIGS) {
+      addResource(conf);
+    }
+    addResource(LLAP_DAEMON_SITE);
+  }
+}
\ No newline at end of file
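As a usage sketch of the new class (the example class is hypothetical; the ConfVars reads mirror the LlapDaemon.java change later in this diff), the constructor layers the DAEMON_CONFIGS resources and then llap-daemon-site.xml, and later resources win for duplicate keys:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
    import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;

    public class DaemonConfExample {
      public static void main(String[] args) {
        // Assumes llap-daemon-site.xml and the DAEMON_CONFIGS files are on the classpath.
        LlapDaemonConfiguration daemonConf = new LlapDaemonConfiguration();
        // A value in the generated llap-daemon-site.xml overrides the same key
        // in hive-site.xml or tez-site.xml, since it is added last.
        int numExecutors = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_NUM_EXECUTORS);
        String workDirs = HiveConf.getVar(daemonConf, ConfVars.LLAP_DAEMON_WORK_DIRS);
        System.out.println(numExecutors + " executors, work dirs: " + workDirs);
      }
    }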

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
index d1ec715..04c28cb 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.LlapNodeId;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import org.apache.hadoop.hive.llap.daemon.QueryFailedHandler;
 import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol;
 import org.apache.hadoop.io.Text;

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
index c0d4690..e066742 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
@@ -29,7 +29,7 @@ import javax.management.ObjectName;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
+import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
 import org.apache.hadoop.hive.llap.daemon.ContainerRunner;
 import org.apache.hadoop.hive.llap.daemon.QueryFailedHandler;
 import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto;
@@ -302,7 +302,7 @@ public class LlapDaemon extends CompositeService implements 
ContainerRunner, Lla
     try {
       // Cache settings will need to be setup in llap-daemon-site.xml - since 
the daemons don't read hive-site.xml
       // Ideally, these properties should be part of LlapDaemonConf rather than HiveConf
-      LlapConfiguration daemonConf = new LlapConfiguration();
+      LlapDaemonConfiguration daemonConf = new LlapDaemonConfiguration();
       int numExecutors = HiveConf.getIntVar(daemonConf, 
ConfVars.LLAP_DAEMON_NUM_EXECUTORS);
 
       String localDirList = HiveConf.getVar(daemonConf, 
ConfVars.LLAP_DAEMON_WORK_DIRS);

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
index 80264a0..14657e6 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentSpecProto;
 import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto;
 import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler;
@@ -42,7 +41,6 @@ import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantLock;

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
index 2c51169..39a1468 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
@@ -119,10 +119,10 @@ public class ShuffleHandler implements 
AttemptRegistrationListener {
 
   public static final String SHUFFLE_HANDLER_LOCAL_DIRS = 
"llap.shuffle.handler.local-dirs";
 
-  public static final String SHUFFLE_MANAGE_OS_CACHE = 
"mapreduce.shuffle.manage.os.cache";
+  public static final String SHUFFLE_MANAGE_OS_CACHE = "llap.shuffle.manage.os.cache";
   public static final boolean DEFAULT_SHUFFLE_MANAGE_OS_CACHE = true;
 
-  public static final String SHUFFLE_READAHEAD_BYTES = 
"mapreduce.shuffle.readahead.bytes";
+  public static final String SHUFFLE_READAHEAD_BYTES = 
"llap.shuffle.readahead.bytes";
   public static final int DEFAULT_SHUFFLE_READAHEAD_BYTES = 4 * 1024 * 1024;
 
   public static final String SHUFFLE_DIR_WATCHER_ENABLED = 
"llap.shuffle.dir-watcher.enabled";
@@ -163,40 +163,39 @@ public class ShuffleHandler implements 
AttemptRegistrationListener {
   public static final String SHUFFLE_PORT_CONFIG_KEY = "llap.shuffle.port";
   public static final int DEFAULT_SHUFFLE_PORT = 15551;
 
-  // TODO Change configs to remove mapreduce references.
   public static final String SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED =
-      "mapreduce.shuffle.connection-keep-alive.enable";
+      "llap.shuffle.connection-keep-alive.enable";
   public static final boolean DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED = 
false;
 
   public static final String SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT =
-      "mapreduce.shuffle.connection-keep-alive.timeout";
+      "llap.shuffle.connection-keep-alive.timeout";
   public static final int DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT = 5; 
//seconds
 
   public static final String SHUFFLE_MAPOUTPUT_META_INFO_CACHE_SIZE =
-      "mapreduce.shuffle.mapoutput-info.meta.cache.size";
+      "llap.shuffle.mapoutput-info.meta.cache.size";
   public static final int DEFAULT_SHUFFLE_MAPOUTPUT_META_INFO_CACHE_SIZE =
       10000;
 
   public static final String CONNECTION_CLOSE = "close";
 
   public static final String SUFFLE_SSL_FILE_BUFFER_SIZE_KEY =
-    "mapreduce.shuffle.ssl.file.buffer.size";
+    "llap.shuffle.ssl.file.buffer.size";
 
   public static final int DEFAULT_SUFFLE_SSL_FILE_BUFFER_SIZE = 60 * 1024;
 
-  public static final String MAX_SHUFFLE_CONNECTIONS = 
"mapreduce.shuffle.max.connections";
+  public static final String MAX_SHUFFLE_CONNECTIONS = 
"llap.shuffle.max.connections";
   public static final int DEFAULT_MAX_SHUFFLE_CONNECTIONS = 0; // 0 implies no 
limit
   
-  public static final String MAX_SHUFFLE_THREADS = 
"mapreduce.shuffle.max.threads";
+  public static final String MAX_SHUFFLE_THREADS = "llap.shuffle.max.threads";
   // 0 implies Netty default of 2 * number of available processors
   public static final int DEFAULT_MAX_SHUFFLE_THREADS = 0;
   
   public static final String SHUFFLE_BUFFER_SIZE = 
-      "mapreduce.shuffle.transfer.buffer.size";
+      "llap.shuffle.transfer.buffer.size";
   public static final int DEFAULT_SHUFFLE_BUFFER_SIZE = 128 * 1024;
   
   public static final String  SHUFFLE_TRANSFERTO_ALLOWED = 
-      "mapreduce.shuffle.transferTo.allowed";
+      "llap.shuffle.transferTo.allowed";
   public static final boolean DEFAULT_SHUFFLE_TRANSFERTO_ALLOWED = true;
   public static final boolean WINDOWS_DEFAULT_SHUFFLE_TRANSFERTO_ALLOWED = 
       false;
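Since these keys now live under the llap.* namespace, overrides belong in the daemon-side configuration rather than in the old mapreduce.shuffle.* settings. A small hypothetical sketch using the constants defined above (the override values are arbitrary examples, not recommendations):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
    import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler;

    public class ShuffleConfExample {
      public static Configuration overrides() {
        Configuration shuffleConf = new LlapDaemonConfiguration();
        shuffleConf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 100);          // llap.shuffle.max.connections
        shuffleConf.setBoolean(ShuffleHandler.SHUFFLE_DIR_WATCHER_ENABLED, true); // llap.shuffle.dir-watcher.enabled
        shuffleConf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 15551);        // llap.shuffle.port (default)
        return shuffleConf;
      }
    }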

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/main/resources/llap-cli-log4j2.properties
----------------------------------------------------------------------
diff --git a/llap-server/src/main/resources/llap-cli-log4j2.properties 
b/llap-server/src/main/resources/llap-cli-log4j2.properties
new file mode 100644
index 0000000..7542193
--- /dev/null
+++ b/llap-server/src/main/resources/llap-cli-log4j2.properties
@@ -0,0 +1,77 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+status = WARN
+name = LlapCliLog4j2
+packages = org.apache.hadoop.hive.ql.log
+
+# list of properties
+property.hive.log.level = INFO
+property.hive.root.logger = console
+property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
+property.hive.log.file = hive.log
+
+# list of all appenders
+appenders = console, DRFA
+
+# console appender
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+# appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
+appender.console.layout.pattern = %p %c{2}: %m%n
+
+# daily rolling file appender
+appender.DRFA.type = RollingFile
+appender.DRFA.name = DRFA
+appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
+# Use %pid in the filePattern to append <process-id>@<host-name> to the 
filename if you want separate log files for different CLI session
+appender.DRFA.filePattern = 
${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
+appender.DRFA.layout.type = PatternLayout
+appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
+appender.DRFA.policies.type = Policies
+appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
+appender.DRFA.policies.time.interval = 1
+appender.DRFA.policies.time.modulate = true
+appender.DRFA.strategy.type = DefaultRolloverStrategy
+appender.DRFA.strategy.max = 30
+
+# list of all loggers
+loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, 
HadoopConf
+
+logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
+logger.NIOServerCnxn.level = WARN
+
+logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
+logger.ClientCnxnSocketNIO.level = WARN
+
+logger.DataNucleus.name = DataNucleus
+logger.DataNucleus.level = ERROR
+
+logger.Datastore.name = Datastore
+logger.Datastore.level = ERROR
+
+logger.JPOX.name = JPOX
+logger.JPOX.level = ERROR
+
+logger.HadoopConf.name = org.apache.hadoop.conf.Configuration
+logger.HadoopConf.level = ERROR
+
+# root logger
+rootLogger.level = ${sys:hive.log.level}
+rootLogger.appenderRefs = root
+rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
index deade5f..5fa2bf1 100644
--- 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
+++ 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
 import org.apache.hadoop.hive.llap.daemon.impl.LlapDaemon;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.Service;

http://git-wip-us.apache.org/repos/asf/hive/blob/f8f50ab1/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemonProtocolServerImpl.java
----------------------------------------------------------------------
diff --git 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemonProtocolServerImpl.java
 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemonProtocolServerImpl.java
index 1cef218..a65bf5c 100644
--- 
a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemonProtocolServerImpl.java
+++ 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TestLlapDaemonProtocolServerImpl.java
@@ -25,7 +25,7 @@ import com.google.protobuf.ServiceException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.llap.configuration.LlapConfiguration;
+import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
 import org.apache.hadoop.hive.llap.daemon.ContainerRunner;
 import org.apache.hadoop.hive.llap.protocol.LlapProtocolBlockingPB;
 import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto;
@@ -39,7 +39,7 @@ public class TestLlapDaemonProtocolServerImpl {
 
   @Test(timeout = 10000)
   public void test() throws ServiceException, IOException {
-    LlapConfiguration daemonConf = new LlapConfiguration();
+    LlapDaemonConfiguration daemonConf = new LlapDaemonConfiguration();
     int rpcPort = HiveConf.getIntVar(daemonConf, 
ConfVars.LLAP_DAEMON_RPC_PORT);
     int numHandlers = HiveConf.getIntVar(daemonConf, 
ConfVars.LLAP_DAEMON_RPC_NUM_HANDLERS);
     ContainerRunner containerRunnerMock = mock(ContainerRunner.class);
