Author: knoguchi
Date: Tue Jan 11 20:38:51 2022
New Revision: 1896927

URL: http://svn.apache.org/viewvc?rev=1896927&view=rev
Log:
PIG-5398: SparkLauncher does not read SPARK_CONF_DIR/spark-defaults.conf (knoguchi)
Modified:
    pig/trunk/CHANGES.txt
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java

Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1896927&r1=1896926&r2=1896927&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Tue Jan 11 20:38:51 2022
@@ -26,6 +26,8 @@ PIG-5282: Upgade to Java 8 (satishsaley
 
 IMPROVEMENTS
 
+PIG-5398: SparkLauncher does not read SPARK_CONF_DIR/spark-defaults.conf (knoguchi)
+
 PIG-5397: Update spark2.version to 2.4.8 (knoguchi)
 
 PIG-5400: OrcStorage dropping struct(tuple) when it only holds a single field inside a Bag(array) (knoguchI)

Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java?rev=1896927&r1=1896926&r2=1896927&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java Tue Jan 11 20:38:51 2022
@@ -18,9 +18,12 @@
 package org.apache.pig.backend.hadoop.executionengine.spark;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -596,6 +599,30 @@ public class SparkLauncher extends Launc
             LOG.warn("SPARK_HOME is not set");
         }
 
+        String sparkConfEnv = System.getenv("SPARK_CONF_DIR");
+        if( sparkConfEnv == null && sparkHome != null) {
+            sparkConfEnv = sparkHome + "/conf";
+        }
+        if( sparkConfEnv != null ) {
+            try {
+                Properties props = new Properties();
+                File propsFile = new File (sparkConfEnv,"spark-defaults.conf");
+                if (propsFile.isFile()) {
+                    try (InputStreamReader isr = new InputStreamReader(
+                            new FileInputStream(propsFile), StandardCharsets.UTF_8)) {
+                        props.load(isr);
+                        for (Map.Entry<Object, Object> e : props.entrySet()) {
+                            pigCtxtProperties.setProperty(e.getKey().toString(),
+                                    e.getValue().toString().trim());
+                        }
+                    }
+                }
+            } catch (IOException ex) {
+                LOG.warn("Reading $SPARK_CONF_DIR/spark-defaults.conf failed");
+            }
+        }
+
+        //Copy all spark.* properties to SparkConf
         for (String key : pigCtxtProperties.stringPropertyNames()) {
             if (key.startsWith("spark.")) {
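
For reference, below is a minimal, self-contained sketch of the behavior this patch adds: resolve the Spark conf directory from $SPARK_CONF_DIR, fall back to $SPARK_HOME/conf, load spark-defaults.conf if it exists, and copy the trimmed entries into the Pig properties so that spark.* keys can later be forwarded to SparkConf. The class and method names in the sketch are illustrative only and do not exist in Pig or Spark.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;
    import java.util.Properties;

    // Hypothetical standalone demo; not part of the Pig code base.
    public class SparkDefaultsDemo {

        // Mirror the patch: prefer $SPARK_CONF_DIR, fall back to $SPARK_HOME/conf.
        static String resolveConfDir(String sparkConfDir, String sparkHome) {
            if (sparkConfDir != null) {
                return sparkConfDir;
            }
            return (sparkHome != null) ? sparkHome + "/conf" : null;
        }

        // Load spark-defaults.conf (if present) and copy trimmed entries into target.
        // spark-defaults.conf uses whitespace-separated "key value" pairs, which
        // java.util.Properties.load() accepts as a key/value separator.
        static void loadSparkDefaults(String confDir, Properties target) {
            if (confDir == null) {
                return;
            }
            File propsFile = new File(confDir, "spark-defaults.conf");
            if (!propsFile.isFile()) {
                return;
            }
            Properties props = new Properties();
            try (InputStreamReader isr = new InputStreamReader(
                    new FileInputStream(propsFile), StandardCharsets.UTF_8)) {
                props.load(isr);
            } catch (IOException ex) {
                System.err.println("Reading spark-defaults.conf failed: " + ex.getMessage());
                return;
            }
            for (Map.Entry<Object, Object> e : props.entrySet()) {
                target.setProperty(e.getKey().toString(), e.getValue().toString().trim());
            }
        }

        public static void main(String[] args) {
            // Example spark-defaults.conf line that would be picked up:
            //   spark.executor.memory   4g
            Properties pigProps = new Properties();
            String confDir = resolveConfDir(System.getenv("SPARK_CONF_DIR"),
                                            System.getenv("SPARK_HOME"));
            loadSparkDefaults(confDir, pigProps);
            // SparkLauncher later forwards only spark.* keys to SparkConf.
            for (String key : pigProps.stringPropertyNames()) {
                if (key.startsWith("spark.")) {
                    System.out.println(key + " = " + pigProps.getProperty(key));
                }
            }
        }
    }

Because the loaded entries go into the Pig properties before the existing spark.* copy loop, properties already set in the Pig context may interact with values from spark-defaults.conf; the sketch above simply overwrites, which is an assumption, not necessarily the precedence the real SparkLauncher applies.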