LuciferYang commented on PR #39124: URL: https://github.com/apache/spark/pull/39124#issuecomment-1358794388
Many tests failed as follows: ``` 2022-12-20T03:15:37.0609530Z [info] org.apache.spark.sql.hive.execution.command.AlterTableAddColumnsSuite *** ABORTED *** (28 milliseconds) 2022-12-20T03:15:37.0701184Z [info] java.lang.reflect.InvocationTargetException: 2022-12-20T03:15:37.0701846Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.0702983Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.0703732Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.0704398Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.0705400Z [info] at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:315) 2022-12-20T03:15:37.0706077Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:514) 2022-12-20T03:15:37.0706751Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:374) 2022-12-20T03:15:37.0707378Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.$anonfun$client$1(TestHive.scala:90) 2022-12-20T03:15:37.0707917Z [info] at scala.Option.getOrElse(Option.scala:189) 2022-12-20T03:15:37.0708804Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client$lzycompute(TestHive.scala:90) 2022-12-20T03:15:37.0709589Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client(TestHive.scala:88) 2022-12-20T03:15:37.0710320Z [info] at org.apache.spark.sql.hive.test.TestHiveSingleton.$init$(TestHiveSingleton.scala:33) 2022-12-20T03:15:37.0711253Z [info] at org.apache.spark.sql.hive.execution.command.AlterTableAddColumnsSuite.<init>(AlterTableAddColumnsSuite.scala:27) 2022-12-20T03:15:37.0712160Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.0712844Z [info] at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.0713829Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.0714480Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.0714972Z [info] at java.lang.Class.newInstance(Class.java:442) 2022-12-20T03:15:37.0715625Z [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:454) 2022-12-20T03:15:37.0716141Z [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) 2022-12-20T03:15:37.0716638Z [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266) 2022-12-20T03:15:37.0717222Z [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 2022-12-20T03:15:37.0718079Z [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 2022-12-20T03:15:37.0718637Z [info] at java.lang.Thread.run(Thread.java:750) 2022-12-20T03:15:37.0719260Z [info] Cause: java.lang.RuntimeException: Failed to initialize default Hive configuration variables! 
2022-12-20T03:15:37.0719939Z [info] at org.apache.hadoop.hive.conf.HiveConf.getConfVarInputStream(HiveConf.java:3638) 2022-12-20T03:15:37.0720558Z [info] at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:4057) 2022-12-20T03:15:37.0721115Z [info] at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:4014) 2022-12-20T03:15:37.0721873Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl$.newHiveConf(HiveClientImpl.scala:1309) 2022-12-20T03:15:37.0722615Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:176) 2022-12-20T03:15:37.0723562Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:141) 2022-12-20T03:15:37.0724265Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.0725154Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.0815583Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.0816308Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.0817005Z [info] at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:315) 2022-12-20T03:15:37.0817691Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:514) 2022-12-20T03:15:37.0818294Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:374) 2022-12-20T03:15:37.0818947Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.$anonfun$client$1(TestHive.scala:90) 2022-12-20T03:15:37.0819658Z [info] at scala.Option.getOrElse(Option.scala:189) 2022-12-20T03:15:37.0820254Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client$lzycompute(TestHive.scala:90) 2022-12-20T03:15:37.0820931Z [info] at 
org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client(TestHive.scala:88) 2022-12-20T03:15:37.0821578Z [info] at org.apache.spark.sql.hive.test.TestHiveSingleton.$init$(TestHiveSingleton.scala:33) 2022-12-20T03:15:37.0822321Z [info] at org.apache.spark.sql.hive.execution.command.AlterTableAddColumnsSuite.<init>(AlterTableAddColumnsSuite.scala:27) 2022-12-20T03:15:37.0823043Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.0823728Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.0824474Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.0825300Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.0825805Z [info] at java.lang.Class.newInstance(Class.java:442) 2022-12-20T03:15:37.0826341Z [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:454) 2022-12-20T03:15:37.0826959Z [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) 2022-12-20T03:15:37.0827461Z [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266) 2022-12-20T03:15:37.0832346Z [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 2022-12-20T03:15:37.0838605Z [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 2022-12-20T03:15:37.0844439Z [info] at java.lang.Thread.run(Thread.java:750) 2022-12-20T03:15:37.0851150Z [info] Cause: java.lang.IllegalArgumentException: Not supported: http://javax.xml.XMLConstants/property/accessExternalDTD 2022-12-20T03:15:37.0857679Z [info] at org.apache.xalan.processor.TransformerFactoryImpl.setAttribute(TransformerFactoryImpl.java:571) 2022-12-20T03:15:37.0863755Z [info] at org.apache.hadoop.util.XMLUtils.newSecureTransformerFactory(XMLUtils.java:141) 2022-12-20T03:15:37.0869737Z [info] at 
org.apache.hadoop.conf.Configuration.writeXml(Configuration.java:3584) 2022-12-20T03:15:37.0875703Z [info] at org.apache.hadoop.conf.Configuration.writeXml(Configuration.java:3550) 2022-12-20T03:15:37.0881683Z [info] at org.apache.hadoop.conf.Configuration.writeXml(Configuration.java:3546) 2022-12-20T03:15:37.0887575Z [info] at org.apache.hadoop.hive.conf.HiveConf.getConfVarInputStream(HiveConf.java:3634) 2022-12-20T03:15:37.0893660Z [info] at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:4057) 2022-12-20T03:15:37.0898428Z [info] at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:4014) 2022-12-20T03:15:37.0904308Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl$.newHiveConf(HiveClientImpl.scala:1309) 2022-12-20T03:15:37.0910423Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:176) 2022-12-20T03:15:37.0916293Z [info] at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:141) 2022-12-20T03:15:37.0921497Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.0927701Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.0932171Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.0938174Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.0943319Z [info] at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:315) 2022-12-20T03:15:37.0992641Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:514) 2022-12-20T03:15:37.1065786Z [info] at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:374) 2022-12-20T03:15:37.1066478Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.$anonfun$client$1(TestHive.scala:90) 
2022-12-20T03:15:37.1067041Z [info] at scala.Option.getOrElse(Option.scala:189) 2022-12-20T03:15:37.1067646Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client$lzycompute(TestHive.scala:90) 2022-12-20T03:15:37.1068489Z [info] at org.apache.spark.sql.hive.test.TestHiveExternalCatalog.client(TestHive.scala:88) 2022-12-20T03:15:37.1069148Z [info] at org.apache.spark.sql.hive.test.TestHiveSingleton.$init$(TestHiveSingleton.scala:33) 2022-12-20T03:15:37.1069906Z [info] at org.apache.spark.sql.hive.execution.command.AlterTableAddColumnsSuite.<init>(AlterTableAddColumnsSuite.scala:27) 2022-12-20T03:15:37.1070634Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2022-12-20T03:15:37.1071314Z [info] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2022-12-20T03:15:37.1072059Z [info] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2022-12-20T03:15:37.1072709Z [info] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 2022-12-20T03:15:37.1073209Z [info] at java.lang.Class.newInstance(Class.java:442) 2022-12-20T03:15:37.1073822Z [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:454) 2022-12-20T03:15:37.1074354Z [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) 2022-12-20T03:15:37.1074847Z [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266) 2022-12-20T03:15:37.1075432Z [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 2022-12-20T03:15:37.1076054Z [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 2022-12-20T03:15:37.1076558Z [info] at java.lang.Thread.run(Thread.java:750) ``` The tests failed because the Hive conf failed to initialize after upgrading to Hadoop 3.3.5; it seems that Spark needs to wait for Hive to support Hadoop 3.3.5 first? 
also cc @sunchao @dongjoon-hyun -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
