This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8f1b02880cb [SPARK-45392][CORE][SQL][SS] Replace `Class.newInstance()` with `Class.getDeclaredConstructor().newInstance()`
8f1b02880cb is described below

commit 8f1b02880cb9b82af39bb624e7fcab00b880fd4c
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Sun Oct 1 19:00:09 2023 -0700

    [SPARK-45392][CORE][SQL][SS] Replace `Class.newInstance()` with `Class.getDeclaredConstructor().newInstance()`
    
    ### What changes were proposed in this pull request?
    This PR replaces `Class.newInstance()` with `Class.getDeclaredConstructor().newInstance()` to clean up the use of deprecated APIs; refer to
    
    https://github.com/openjdk/jdk/blob/dfacda488bfbe2e11e8d607a6d08527710286982/src/java.base/share/classes/java/lang/Class.java#L557-L583
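    
    For illustration only (not taken from this patch), the substitution at each call site is mechanical; a minimal sketch, using `java.util.ArrayList` purely as a stand-in class:
    
        val cls = classOf[java.util.ArrayList[_]]
        // Before: Class.newInstance() is deprecated since Java 9 because it
        // propagates checked exceptions thrown by the constructor unwrapped.
        val before = cls.newInstance()
        // After: resolve the no-arg constructor explicitly, then instantiate;
        // constructor failures surface as InvocationTargetException.
        val after = cls.getDeclaredConstructor().newInstance()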
    
    Note: the new API does not provide the `cachedConstructor` caching that `Class.newInstance()` has. I don't believe any of the call sites fixed here are hotspots; if a hotspot is discovered in the future, it can be optimized by adding a loading cache.
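    
    As a rough sketch of that loading-cache idea (a hypothetical helper, not part of this change), a `ConcurrentHashMap` keyed by `Class[_]` would be enough to restore constructor caching at a hot call site:
    
        import java.lang.reflect.Constructor
        import java.util.concurrent.ConcurrentHashMap
        import java.util.function.{Function => JFunction}
    
        object ConstructorCache {
          private val cache = new ConcurrentHashMap[Class[_], Constructor[_]]()
          private val lookup: JFunction[Class[_], Constructor[_]] =
            (c: Class[_]) => c.getDeclaredConstructor()
    
          // Resolve the no-arg constructor once per class, then reuse it.
          def newInstance[T](cls: Class[T]): T =
            cache.computeIfAbsent(cls, lookup).newInstance().asInstanceOf[T]
        }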
    
    ### Why are the changes needed?
    Clean up the use of deprecated APIs.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43193 from LuciferYang/class-newInstance.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../org/apache/spark/executor/ExecutorClassLoaderSuite.scala     | 2 +-
 sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala  | 3 ++-
 .../main/scala/org/apache/spark/sql/execution/command/ddl.scala  | 3 ++-
 .../org/apache/spark/sql/execution/datasources/DataSource.scala  | 4 ++--
 .../spark/sql/execution/datasources/FallBackFileSourceV2.scala   | 2 +-
 .../scala/org/apache/spark/sql/execution/datasources/rules.scala | 9 +++++----
 .../streaming/sources/RatePerMicroBatchProviderSuite.scala       | 3 ++-
 .../execution/streaming/sources/RateStreamProviderSuite.scala    | 5 +++--
 .../sql/execution/streaming/sources/TextSocketStreamSuite.scala  | 2 +-
 9 files changed, 19 insertions(+), 14 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala b/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
index 8e93da7adf0..1573d0286a3 100644
--- a/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
@@ -335,7 +335,7 @@ class ExecutorClassLoaderSuite
         // scalastyle:off classforname
         val classB = Class.forName("TestClassB", true, classLoader)
         // scalastyle:on classforname
-        val instanceOfTestClassB = classB.newInstance()
+        val instanceOfTestClassB = classB.getDeclaredConstructor().newInstance()
         assert(instanceOfTestClassB.toString === "TestClassB")
         classB.getMethod("foo").invoke(instanceOfTestClassB).asInstanceOf[String]
       }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index d446e9a15b5..3a6fb1047dc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -763,7 +763,8 @@ class SparkSession private(
     DataSource.lookupDataSource(runner, sessionState.conf) match {
       case source if classOf[ExternalCommandRunner].isAssignableFrom(source) =>
         Dataset.ofRows(self, ExternalCommandExecutor(
-          source.newInstance().asInstanceOf[ExternalCommandRunner], command, options))
+          source.getDeclaredConstructor().newInstance()
+            .asInstanceOf[ExternalCommandRunner], command, options))
 
       case _ =>
         throw QueryCompilationErrors.commandExecutionInRunnerUnsupportedError(runner)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index a8f7cdb2600..0ef491c3b0f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -1024,7 +1024,8 @@ object DDLUtils extends Logging {
     source match {
       case f: FileFormat => DataSourceUtils.checkFieldNames(f, schema)
       case f: FileDataSourceV2 =>
-        DataSourceUtils.checkFieldNames(f.fallbackFileFormat.newInstance(), schema)
+        DataSourceUtils.checkFieldNames(
+          f.fallbackFileFormat.getDeclaredConstructor().newInstance(), schema)
       case _ =>
     }
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index c701fe2dbcb..bfb919bf498 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -105,7 +105,7 @@ case class DataSource(
     // [[FileDataSourceV2]] will still be used if we call the load()/save() method in
     // [[DataFrameReader]]/[[DataFrameWriter]], since they use method `lookupDataSource`
     // instead of `providingClass`.
-    cls.newInstance() match {
+    cls.getDeclaredConstructor().newInstance() match {
       case f: FileDataSourceV2 => f.fallbackFileFormat
       case _ => cls
     }
@@ -699,7 +699,7 @@ object DataSource extends Logging {
     val useV1Sources = conf.getConf(SQLConf.USE_V1_SOURCE_LIST).toLowerCase(Locale.ROOT)
       .split(",").map(_.trim)
     val cls = lookupDataSource(provider, conf)
-    cls.newInstance() match {
+    cls.getDeclaredConstructor().newInstance() match {
      case d: DataSourceRegister if useV1Sources.contains(d.shortName()) => None
       case t: TableProvider
          if !useV1Sources.contains(cls.getCanonicalName.toLowerCase(Locale.ROOT)) =>
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala
index 0bd3b6c2bf0..66b5971eef2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala
@@ -36,7 +36,7 @@ class FallBackFileSourceV2(sparkSession: SparkSession) extends Rule[LogicalPlan]
   override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
     case i @ InsertIntoStatement(
         d @ DataSourceV2Relation(table: FileTable, _, _, _, _), _, _, _, _, _, _) =>
-      val v1FileFormat = table.fallbackFileFormat.newInstance()
+      val v1FileFormat = table.fallbackFileFormat.getDeclaredConstructor().newInstance()
       val relation = HadoopFsRelation(
         table.fileIndex,
         table.fileIndex.partitionSchema,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index f9b3f73ff02..c5e86ee2d03 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -284,10 +284,11 @@ case class PreprocessTableCreation(catalog: SessionCatalog) extends Rule[Logical
       }
   }
 
-  private def fallBackV2ToV1(cls: Class[_]): Class[_] = cls.newInstance match {
-    case f: FileDataSourceV2 => f.fallbackFileFormat
-    case _ => cls
-  }
+  private def fallBackV2ToV1(cls: Class[_]): Class[_] =
+    cls.getDeclaredConstructor().newInstance() match {
+      case f: FileDataSourceV2 => f.fallbackFileFormat
+      case _ => cls
+    }
 
   private def normalizeCatalogTable(schema: StructType, table: CatalogTable): CatalogTable = {
     SchemaUtils.checkSchemaColumnNameDuplication(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RatePerMicroBatchProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RatePerMicroBatchProviderSuite.scala
index 5ef531d4540..48f90e34890 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RatePerMicroBatchProviderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RatePerMicroBatchProviderSuite.scala
@@ -29,7 +29,8 @@ class RatePerMicroBatchProviderSuite extends StreamTest {
   import testImplicits._
 
   test("RatePerMicroBatchProvider in registry") {
-    val ds = DataSource.lookupDataSource("rate-micro-batch", spark.sqlContext.conf).newInstance()
+    val ds = DataSource.lookupDataSource("rate-micro-batch", spark.sqlContext.conf)
+      .getConstructor().newInstance()
     assert(ds.isInstanceOf[RatePerMicroBatchProvider], "Could not find rate-micro-batch source")
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala
index 363189a3b36..556782d9c55 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala
@@ -56,14 +56,15 @@ class RateStreamProviderSuite extends StreamTest {
   }
 
   test("RateStreamProvider in registry") {
-    val ds = DataSource.lookupDataSource("rate", spark.sqlContext.conf).newInstance()
+    val ds = DataSource.lookupDataSource("rate", spark.sqlContext.conf)
+      .getConstructor().newInstance()
     assert(ds.isInstanceOf[RateStreamProvider], "Could not find rate source")
   }
 
   test("compatible with old path in registry") {
     val ds = DataSource.lookupDataSource(
       "org.apache.spark.sql.execution.streaming.RateSourceProvider",
-      spark.sqlContext.conf).newInstance()
+      spark.sqlContext.conf).getConstructor().newInstance()
     assert(ds.isInstanceOf[RateStreamProvider], "Could not find rate source")
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala
index 03388724ef5..92dd3a99680 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala
@@ -87,7 +87,7 @@ class TextSocketStreamSuite extends StreamTest with SharedSparkSession {
   test("backward compatibility with old path") {
     val ds = DataSource.lookupDataSource(
       "org.apache.spark.sql.execution.streaming.TextSocketSourceProvider",
-      spark.sqlContext.conf).newInstance()
+      spark.sqlContext.conf).getConstructor().newInstance()
     assert(ds.isInstanceOf[TextSocketSourceProvider], "Could not find socket source")
   }
 

