This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 32b6dc3b7 [KYUUBI #5426] [MINOR][KSHC] Avoid use class.newInstance directly
32b6dc3b7 is described below

commit 32b6dc3b743c1cf5c97508c1d320f873a694869d
Author: sychen <[email protected]>
AuthorDate: Mon Oct 16 21:25:39 2023 +0800

    [KYUUBI #5426] [MINOR][KSHC] Avoid use class.newInstance directly
    
    ### _Why are the changes needed?_
    
    Remove the deprecated `Class#newInstance` usage.
    
    https://github.com/openjdk/jdk11u-dev/blob/c780db754e14ff77995dc090396d1958cea1ada1/src/java.base/share/classes/java/lang/Class.java#L534-L535
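    
    For context, a minimal sketch of the migration pattern (not taken from the patch itself; the `newDeserializer` helper is hypothetical). Per the linked JDK note, `Class#newInstance` is deprecated since Java 9 because it propagates checked exceptions thrown by the no-arg constructor directly, while the replacement wraps them in `InvocationTargetException`:
    
    ```scala
    import org.apache.hadoop.hive.serde2.Deserializer
    
    // Hypothetical helper, not part of the patch: instantiate a Deserializer
    // from its Class object via the non-deprecated reflective path.
    def newDeserializer(clazz: Class[_ <: Deserializer]): Deserializer = {
      // Deprecated since Java 9: clazz.newInstance()
      // Replacement used throughout this patch:
      clazz.getDeclaredConstructor().newInstance()
    }
    ```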
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    No.
    
    Closes #5426 from cxzl25/newInstance.
    
    Closes #5426
    
    dcb679b95 [sychen] avoid use class.newInstance directly
    
    Authored-by: sychen <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../kyuubi/spark/connector/hive/read/HivePartitionedReader.scala     | 5 +++--
 .../org/apache/kyuubi/spark/connector/hive/read/HiveReader.scala     | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HivePartitionedReader.scala b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HivePartitionedReader.scala
index 732643eb1..5463a7bdd 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HivePartitionedReader.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HivePartitionedReader.scala
@@ -47,7 +47,7 @@ case class HivePartitionedReader(
 
   private val hiveConf = broadcastHiveConf.value.value
 
-  private val tableDeser = tableDesc.getDeserializerClass.newInstance()
+  private val tableDeser = tableDesc.getDeserializerClass.getDeclaredConstructor().newInstance()
   tableDeser.initialize(hiveConf, tableDesc.getProperties)
 
   private val localDeser: Deserializer = bindPartitionOpt match {
@@ -55,7 +55,8 @@ case class HivePartitionedReader(
       val tableProperties = tableDesc.getProperties
       val props = new Properties(tableProperties)
       val deserializer =
-        bindPartition.getDeserializer.getClass.asInstanceOf[Class[Deserializer]].newInstance()
+        bindPartition.getDeserializer.getClass.asInstanceOf[
+          Class[Deserializer]].getDeclaredConstructor().newInstance()
       deserializer.initialize(hiveConf, props)
       deserializer
     case _ => tableDeser
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveReader.scala b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveReader.scala
index 54f6e80c0..f6a7b194e 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveReader.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveReader.scala
@@ -62,7 +62,7 @@ object HiveReader {
 
     HiveShim.appendReadColumns(hiveConf, neededColumnIDs, neededColumnNames)
 
-    val deserializer = tableDesc.getDeserializerClass.newInstance
+    val deserializer = tableDesc.getDeserializerClass.getDeclaredConstructor().newInstance()
     deserializer.initialize(hiveConf, tableDesc.getProperties)
 
     // Specifies types and object inspectors of columns to be scanned.
