echauchot commented on code in PR #24862:
URL: https://github.com/apache/beam/pull/24862#discussion_r1067006400
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -95,24 +122,37 @@ public static SparkSession
getOrCreateSession(SparkStructuredStreamingPipelineOp
if (options.getUseActiveSparkSession()) {
return SparkSession.active();
}
- return sessionBuilder(options.getSparkMaster(), options.getAppName(),
options.getFilesToStage())
- .getOrCreate();
+ return sessionBuilder(options.getSparkMaster(), options).getOrCreate();
}
/** Creates Spark session builder with some optimizations for local mode,
e.g. in tests. */
public static SparkSession.Builder sessionBuilder(String master) {
- return sessionBuilder(master, null, null);
+ return sessionBuilder(master, null);
}
private static SparkSession.Builder sessionBuilder(
- String master, @Nullable String appName, @Nullable List<String> jars) {
- SparkConf sparkConf = new SparkConf();
- sparkConf.setMaster(master);
- if (appName != null) {
- sparkConf.setAppName(appName);
- }
- if (jars != null && !jars.isEmpty()) {
- sparkConf.setJars(jars.toArray(new String[0]));
+ String master, @Nullable SparkStructuredStreamingPipelineOptions
options) {
+
+ SparkConf sparkConf = new SparkConf().setIfMissing("spark.master", master);
+ master = sparkConf.get("spark.master"); // update to effective master
Review Comment:
I'm not sure modifying the `master` input parameter will update the Beam
configuration, as `sessionBuilder()` is called with either
`SparkStructuredStreamingPipelineOptions.getMaster()`, the `SparkSessionRule`
default (static "local[2]"), or an explicit master argument.
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -130,19 +170,35 @@ private static SparkSession.Builder sessionBuilder(
// mode, so try to align with value of "sparkMaster" option in this case.
// We should not overwrite this value (or any user-defined spark
configuration value) if the
// user has already configured it.
- if (master != null
- && !master.equals("local[*]")
- && master.startsWith("local[")
- && System.getProperty("spark.sql.shuffle.partitions") == null) {
- int numPartitions =
- Integer.parseInt(master.substring("local[".length(), master.length()
- 1));
- if (numPartitions > 0) {
- sparkConf.set("spark.sql.shuffle.partitions",
String.valueOf(numPartitions));
- }
+ int partitions = localNumPartitions(master);
+ if (partitions > 0) {
+ sparkConf.setIfMissing("spark.sql.shuffle.partitions",
Integer.toString(partitions));
}
+
return SparkSession.builder().config(sparkConf);
}
+ @SuppressWarnings({"return", "toarray.nullable.elements",
"methodref.receiver"}) // safe to ignore
+ private static String[] filesToStage(
+ SparkStructuredStreamingPipelineOptions opts, Collection<String>
excludes) {
+ Collection<String> files = opts.getFilesToStage();
+ if (files == null || files.isEmpty()) {
+ return EMPTY_STRING_ARRAY;
+ }
+ if (!excludes.isEmpty()) {
+ files = Collections2.filter(files, f ->
!excludes.stream().anyMatch(f::contains));
+ }
+ return files.toArray(EMPTY_STRING_ARRAY);
+ }
+
+ private static String[] sparkJars(SparkConf conf) {
Review Comment:
nit: rename to configuredSparkJars for clarity
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -95,24 +122,37 @@ public static SparkSession
getOrCreateSession(SparkStructuredStreamingPipelineOp
if (options.getUseActiveSparkSession()) {
return SparkSession.active();
}
- return sessionBuilder(options.getSparkMaster(), options.getAppName(),
options.getFilesToStage())
- .getOrCreate();
+ return sessionBuilder(options.getSparkMaster(), options).getOrCreate();
}
/** Creates Spark session builder with some optimizations for local mode,
e.g. in tests. */
public static SparkSession.Builder sessionBuilder(String master) {
- return sessionBuilder(master, null, null);
+ return sessionBuilder(master, null);
}
private static SparkSession.Builder sessionBuilder(
- String master, @Nullable String appName, @Nullable List<String> jars) {
- SparkConf sparkConf = new SparkConf();
- sparkConf.setMaster(master);
- if (appName != null) {
- sparkConf.setAppName(appName);
- }
- if (jars != null && !jars.isEmpty()) {
- sparkConf.setJars(jars.toArray(new String[0]));
+ String master, @Nullable SparkStructuredStreamingPipelineOptions
options) {
+
+ SparkConf sparkConf = new SparkConf().setIfMissing("spark.master", master);
+ master = sparkConf.get("spark.master"); // update to effective master
+
+ if (options != null) {
+ sparkConf.setAppName(options.getAppName());
Review Comment:
`SparkStructuredStreamingPipelineOptions.getAppName()` can be null, which
will raise an NPE:
https://github.com/apache/spark/blob/7ec5af4c052229dbe94628951df2f67b3036609f/core/src/main/scala/org/apache/spark/SparkConf.scala#L91.
You need to ensure it is not null before passing it to `setAppName()`.
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -130,19 +170,35 @@ private static SparkSession.Builder sessionBuilder(
// mode, so try to align with value of "sparkMaster" option in this case.
// We should not overwrite this value (or any user-defined spark
configuration value) if the
// user has already configured it.
- if (master != null
- && !master.equals("local[*]")
- && master.startsWith("local[")
- && System.getProperty("spark.sql.shuffle.partitions") == null) {
- int numPartitions =
- Integer.parseInt(master.substring("local[".length(), master.length()
- 1));
- if (numPartitions > 0) {
- sparkConf.set("spark.sql.shuffle.partitions",
String.valueOf(numPartitions));
- }
+ int partitions = localNumPartitions(master);
+ if (partitions > 0) {
+ sparkConf.setIfMissing("spark.sql.shuffle.partitions",
Integer.toString(partitions));
}
+
return SparkSession.builder().config(sparkConf);
}
+ @SuppressWarnings({"return", "toarray.nullable.elements",
"methodref.receiver"}) // safe to ignore
+ private static String[] filesToStage(
Review Comment:
nit: rename to filterConfiguredFilesToStage for clarity
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -87,6 +96,24 @@ public class SparkSessionFactory {
private static final Logger LOG =
LoggerFactory.getLogger(SparkSessionFactory.class);
+ // Patterns to exclude local JRE and certain artifact (groups) in Maven and
Gradle cache.
+ private static final Collection<String> SPARK_JAR_EXCLUDES =
Review Comment:
Is there a way to extract it from the build configuration rather than having
it hard-coded statically here? That would keep all the dependency-related
configuration in one place, in the build configuration.
##########
runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkSessionFactory.java:
##########
@@ -95,24 +122,37 @@ public static SparkSession
getOrCreateSession(SparkStructuredStreamingPipelineOp
if (options.getUseActiveSparkSession()) {
return SparkSession.active();
}
- return sessionBuilder(options.getSparkMaster(), options.getAppName(),
options.getFilesToStage())
- .getOrCreate();
+ return sessionBuilder(options.getSparkMaster(), options).getOrCreate();
}
/** Creates Spark session builder with some optimizations for local mode,
e.g. in tests. */
public static SparkSession.Builder sessionBuilder(String master) {
- return sessionBuilder(master, null, null);
+ return sessionBuilder(master, null);
}
private static SparkSession.Builder sessionBuilder(
- String master, @Nullable String appName, @Nullable List<String> jars) {
- SparkConf sparkConf = new SparkConf();
- sparkConf.setMaster(master);
- if (appName != null) {
- sparkConf.setAppName(appName);
- }
- if (jars != null && !jars.isEmpty()) {
- sparkConf.setJars(jars.toArray(new String[0]));
+ String master, @Nullable SparkStructuredStreamingPipelineOptions
options) {
+
+ SparkConf sparkConf = new SparkConf().setIfMissing("spark.master", master);
+ master = sparkConf.get("spark.master"); // update to effective master
+
+ if (options != null) {
+ sparkConf.setAppName(options.getAppName());
+
+ if (options.getFilesToStage() != null &&
!options.getFilesToStage().isEmpty()) {
+ // Append the files to stage provided by the user to `spark.jars`.
+ PipelineResources.prepareFilesForStaging(options);
+ String[] staged = filesToStage(options, Collections.emptyList());
+ String[] jars = sparkJars(sparkConf);
+ sparkConf.setJars(jars.length > 0 ? ArrayUtils.addAll(jars, staged) :
staged);
+ } else if (!sparkConf.contains("spark.jars") &&
!master.startsWith("local[")) {
+ // Stage classpath if `spark.jars` not set and not in local mode.
+ PipelineResources.prepareFilesForStaging(options);
Review Comment:
`prepareFilesForStaging()` will do nothing, as in that case
`options.getFilesToStage()` is either null or empty. It will be equivalent to
calling `options.setFilesToStage()` with an empty list.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]