Author: brock
Date: Mon Dec 1 16:34:57 2014
New Revision: 1642709
URL: http://svn.apache.org/r1642709
Log:
HIVE-8998 - Logging is not configured in spark-submit sub-process (Brock)
Modified:
hive/branches/spark/data/conf/spark/hive-site.xml
hive/branches/spark/data/conf/spark/log4j.properties
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
Modified: hive/branches/spark/data/conf/spark/hive-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/conf/spark/hive-site.xml?rev=1642709&r1=1642708&r2=1642709&view=diff
==============================================================================
--- hive/branches/spark/data/conf/spark/hive-site.xml (original)
+++ hive/branches/spark/data/conf/spark/hive-site.xml Mon Dec 1 16:34:57 2014
@@ -206,8 +206,8 @@
</property>
<property>
- <name>spark.root.dir</name>
- <value>${spark.home}</value>
+ <name>spark.log.dir</name>
+ <value>${spark.home}/logs/</value>
</property>
</configuration>
Modified: hive/branches/spark/data/conf/spark/log4j.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/conf/spark/log4j.properties?rev=1642709&r1=1642708&r2=1642709&view=diff
==============================================================================
--- hive/branches/spark/data/conf/spark/log4j.properties (original)
+++ hive/branches/spark/data/conf/spark/log4j.properties Mon Dec 1 16:34:57 2014
@@ -1,9 +1,4 @@
-# Set everything to be logged to the console
-log4j.rootCategory=INFO, DRFA
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+log4j.rootCategory=DEBUG, DRFA
# Settings to quiet third party logs that are too verbose
log4j.logger.org.eclipse.jetty=WARN
@@ -11,11 +6,9 @@ log4j.logger.org.eclipse.jetty.util.comp
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-
-
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${spark.root.dir}/logs/spark.log
+log4j.appender.DRFA.File=${spark.log.dir}/spark.log
# Rollver at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
Modified:
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1642709&r1=1642708&r2=1642709&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Mon Dec 1 16:34:57 2014
@@ -152,7 +152,7 @@ class SparkClientImpl implements SparkCl
for (Map.Entry<String, String> e : conf.entrySet()) {
args.add("--conf");
- args.add(String.format("%s=%s", e.getKey(), e.getValue()));
+ args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
}
try {
RemoteDriver.main(args.toArray(new String[args.size()]));
@@ -172,7 +172,7 @@ class SparkClientImpl implements SparkCl
Properties allProps = new Properties();
for (Map.Entry<String, String> e : conf.entrySet()) {
- allProps.put(e.getKey(), e.getValue());
+ allProps.put(e.getKey(), conf.get(e.getKey()));
}
allProps.put(ClientUtils.CONF_KEY_SECRET, SparkClientFactory.secret);
@@ -198,6 +198,14 @@ class SparkClientImpl implements SparkCl
if (sparkHome == null) {
sparkHome = System.getProperty("spark.home");
}
+ String sparkLogDir = conf.get("spark.log.dir");
+ if (sparkLogDir == null) {
+ if (sparkHome == null) {
+ sparkLogDir = "./target/";
+ } else {
+ sparkLogDir = sparkHome + "/logs/";
+ }
+ }
if (sparkHome != null) {
argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
} else {
@@ -254,6 +262,10 @@ class SparkClientImpl implements SparkCl
LOG.debug("Running client driver with argv: {}", Joiner.on(" ").join(argv));
ProcessBuilder pb = new ProcessBuilder(argv.toArray(new String[argv.size()]));
+ Map<String, String> env = pb.environment();
+ String javaOpts = Joiner.on(" ").skipNulls().join("-Dspark.log.dir=" + sparkLogDir,
+ env.get("SPARK_JAVA_OPTS"));
+ env.put("SPARK_JAVA_OPTS", javaOpts);
final Process child = pb.start();
int childId = childIdGenerator.incrementAndGet();