tszerszen commented on a change in pull request #13743:
URL: https://github.com/apache/beam/pull/13743#discussion_r566051297



##########
File path: runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineRunner.java
##########
@@ -123,10 +135,66 @@ public PortablePipelineResult run(RunnerApi.Pipeline pipeline, JobInfo jobInfo)
         "Will stage {} files. (Enable logging at DEBUG level to see which files will be staged.)",
         pipelineOptions.getFilesToStage().size());
     LOG.debug("Staging files: {}", pipelineOptions.getFilesToStage());
-
     PortablePipelineResult result;
     final JavaSparkContext jsc = SparkContextFactory.getSparkContext(pipelineOptions);
 
+    EventLoggingListener eventLoggingListener = null;
+    if (pipelineOptions.getEventLogEnabled()) {
+      eventLoggingListener =
+          new EventLoggingListener(
+              jobInfo.jobId(),
+              new scala.Option<String>() {
+                @Override
+                public boolean isEmpty() {
+                  return false;
+                }
+
+                @Override
+                public String get() {
+                  return jobInfo.jobName();
+                }
+
+                @Override
+                public Object productElement(int i) {
+                  return null;
+                }
+
+                @Override
+                public int productArity() {
+                  return 0;
+                }
+
+                @Override
+                public boolean canEqual(Object o) {
+                  return false;
+                }
+              },
+              new URI(pipelineOptions.getSparkHistoryDir()),
+              jsc.getConf(),
+              jsc.hadoopConfiguration());
+      eventLoggingListener.initializeLogIfNecessary(false, false);
+      eventLoggingListener.start();
+      scala.collection.immutable.Map<String, String> logUrlMap =
+          new scala.collection.immutable.HashMap<String, String>();
+      Tuple2<String, String>[] sparkConfList = jsc.getConf().getAll();

Review comment:
       I've changed the logic to use `getAllWithPrefix("spark.executor.id")`.

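For context, a minimal, self-contained sketch (not the PR's code; the conf values and class name are illustrative) of how `SparkConf.getAllWithPrefix` differs from `getAll`: it returns only the entries whose keys start with the given prefix, with the prefix stripped from the returned keys.

```java
import org.apache.spark.SparkConf;
import scala.Tuple2;

public class ConfPrefixExample {
  public static void main(String[] args) {
    SparkConf conf =
        new SparkConf()
            .setMaster("local[1]")
            .setAppName("prefix-demo")
            .set("spark.executor.id", "driver");

    // getAll() would return every entry in the conf; getAllWithPrefix(...)
    // returns only matching entries, keyed by the suffix after the prefix.
    Tuple2<String, String>[] matches = conf.getAllWithPrefix("spark.executor.id");
    for (Tuple2<String, String> kv : matches) {
      System.out.println("'" + kv._1() + "' -> " + kv._2()); // '' -> driver
    }
  }
}
```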
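As an aside on the quoted hunk: the anonymous `scala.Option<String>` subclass can usually be avoided, since the companion-object factory is callable from Java. A hedged sketch, assuming Scala 2.x on the classpath (the job name is illustrative):

```java
import scala.Option;

public class OptionFactoryExample {
  public static void main(String[] args) {
    // Option.apply maps null to None and any other value to Some(value),
    // which matches the anonymous subclass's behavior for non-null names.
    Option<String> some = Option.apply("my-job-name");
    Option<String> none = Option.<String>apply(null);
    System.out.println(some.isDefined() + " " + some.get()); // true my-job-name
    System.out.println(none.isEmpty()); // true
  }
}
```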
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]

