This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new df8d8dde527f [SPARK-55428][BUILD] Sync Netty Java options everywhere
df8d8dde527f is described below
commit df8d8dde527fab634ebc78eafb2c37c99a5e0bab
Author: Cheng Pan <[email protected]>
AuthorDate: Sun Feb 8 12:34:49 2026 -0800
[SPARK-55428][BUILD] Sync Netty Java options everywhere
### What changes were proposed in this pull request?
Remove the redundant `-Dio.netty.tryReflectionSetAccessible=true` from some build scripts and `pom.xml` files - it is already added by `JavaModuleOptions` (for `spark-submit`-based cases) or by the Maven property `extraJavaTestArgs` (for ScalaTest/JUnit cases).
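For context, here is a minimal, illustrative Java sketch of the idea (not the actual Spark source): JVMs launched via `spark-submit` pick up the Netty-related flags from one central list, so individual scripts no longer need to repeat them. The class and constant names below are hypothetical.
```java
// Illustrative sketch only; in Spark the real list lives in JavaModuleOptions
// and in the Maven property extraJavaTestArgs, not in this hypothetical class.
public final class DefaultJvmOptionsSketch {

  // Hypothetical central list of default JVM options applied to launched JVMs.
  private static final String[] DEFAULT_OPTIONS = {
      "-XX:+IgnoreUnrecognizedVMOptions",
      "-Dio.netty.tryReflectionSetAccessible=true"
  };

  // Render the options as a single command-line fragment.
  public static String asCommandLine() {
    return String.join(" ", DEFAULT_OPTIONS);
  }

  public static void main(String[] args) {
    System.out.println(asCommandLine());
  }
}
```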
Add the missing options to `sql/connect/bin/spark-connect-scala-client`:
```
-Dio.netty.allocator.type=pooled
-Dio.netty.handler.ssl.defaultEndpointVerificationAlgorithm=NONE
```
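As a quick way to confirm the script-level flags took effect, a small hedged Java check (it assumes it is run inside a JVM started by the client script; the property names are taken from the list above):
```java
// Minimal check: print the Netty system properties the client script is
// expected to set. A null value means the flag was not passed to this JVM.
public final class NettyOptionCheck {
  public static void main(String[] args) {
    String[] props = {
        "io.netty.allocator.type",
        "io.netty.handler.ssl.defaultEndpointVerificationAlgorithm"
    };
    for (String p : props) {
      System.out.println(p + " = " + System.getProperty(p));
    }
  }
}
```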
### Why are the changes needed?
Keep Netty Java options in sync everywhere.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass GHA.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #54209 from pan3793/SPARK-55428.
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
R/run-tests.sh                              | 4 ++--
sql/catalyst/pom.xml                        | 2 +-
sql/connect/bin/spark-connect-scala-client  | 2 ++
sql/core/pom.xml                            | 2 +-
4 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/R/run-tests.sh b/R/run-tests.sh
index 3a90b44c2b65..59186fd3a74f 100755
--- a/R/run-tests.sh
+++ b/R/run-tests.sh
@@ -30,9 +30,9 @@ if [[ $(echo $SPARK_AVRO_JAR_PATH | wc -l) -eq 1 ]]; then
fi

if [ -z "$SPARK_JARS" ]; then
- SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" --conf spark.executor.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
+ SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Xss4M" --conf spark.executor.extraJavaOptions="-Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
else
- SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --jars $SPARK_JARS --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" --conf spark.executor.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
+ SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --jars $SPARK_JARS --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Xss4M" --conf spark.executor.extraJavaOptions="-Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
fi

FAILED=$((PIPESTATUS[0]||$FAILED))
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index 9b7fb89ddd9e..fc4ed86bcabb 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -173,7 +173,7 @@
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
- <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs} -Dio.netty.tryReflectionSetAccessible=true</argLine>
+ <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs}</argLine>
</configuration>
</plugin>
</plugins>
diff --git a/sql/connect/bin/spark-connect-scala-client b/sql/connect/bin/spark-connect-scala-client
index 4d508e626df7..7f90353cc39b 100755
--- a/sql/connect/bin/spark-connect-scala-client
+++ b/sql/connect/bin/spark-connect-scala-client
@@ -71,6 +71,8 @@ JVM_ARGS="-XX:+IgnoreUnrecognizedVMOptions \
--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
-Djdk.reflect.useDirectMethodHandle=false \
-Dio.netty.tryReflectionSetAccessible=true \
+ -Dio.netty.allocator.type=pooled \
+ -Dio.netty.handler.ssl.defaultEndpointVerificationAlgorithm=NONE \
--enable-native-access=ALL-UNNAMED \
$SCJVM_ARGS"
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index ab6a8f8182e6..ce6e7dcdda6c 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -344,7 +344,7 @@
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<configuration>
- <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs} -Dio.netty.tryReflectionSetAccessible=true</argLine>
+ <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs}</argLine>
</configuration>
</plugin>
<plugin>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]