LantaoJin opened a new pull request #25944: [SPARK-29254][SQL] Failed to include jars passed in through --jars when isolatedLoader is enabled
URL: https://github.com/apache/spark/pull/25944
 
 
   ### What changes were proposed in this pull request?
   Jars passed in through `--jars` fail to be included when the isolated loader is enabled (`spark.sql.hive.metastore.jars != builtin`). How to reproduce:
   ```scala
     test("SPARK-29254: include jars passed in through --jars when 
isolatedLoader is enabled") {
       def getJarFromUrl(urlString: String): File = {
         val fileName = urlString.split("/").last
         Utils.doFetchFile(urlString, Utils.createTempDir(), fileName, new 
SparkConf, null, null)
       }
       val repository = 
"https://repository.apache.org/content/repositories/releases/";
       val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
       val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
       val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
       // download Hive 2.1.1, a non builtinHiveVersion(1.2.1) version for 
testing
       val jar3 = getJarFromUrl(s"${repository}org/apache/hive/hive-contrib/" +
         s"2.1.1/hive-contrib-2.1.1.jar").getCanonicalPath
       val jar4 = 
getJarFromUrl(s"${repository}org/apache/hive/hcatalog/hive-hcatalog-core/" +
         s"2.1.1/hive-hcatalog-core-2.1.1.jar").getCanonicalPath
       val jarsString = Seq(jar1, jar2, jar3, jar4).map(j => 
j.toString).mkString(",")
       val args = Seq(
         "--class", 
SparkSubmitClassLoaderTest.getClass.getName.stripSuffix("$"),
         "--name", "SparkSubmitClassLoaderTest",
         "--master", "local-cluster[2,1,1024]",
         "--conf", "spark.ui.enabled=false",
         "--conf", "spark.master.rest.enabled=false",
         "--conf", "spark.sql.hive.metastore.version=2.1.1",
         "--conf", "spark.sql.hive.metastore.jars=maven",
         "--driver-java-options", "-Dderby.system.durability=test",
         "--jars", jarsString,
         unusedJar.toString, "SparkSubmitClassA", "SparkSubmitClassB")
       runSparkSubmit(args)
     }
   ```
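   Note that setting `spark.sql.hive.metastore.version=2.1.1` together with `spark.sql.hive.metastore.jars=maven` forces Spark to build the Hive client through `IsolatedClientLoader`, downloading Hive 2.1.1 from Maven instead of using the builtin 1.2.1 classes, which is exactly the code path where the `--jars` entries go missing.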
   Running it fails with a `ClassNotFoundException`:
   ```shell
   2019-09-25 22:11:42.854 - stderr> 19/09/25 22:11:42 ERROR log: error in initSerDe: java.lang.ClassNotFoundException Class org.apache.hive.hcatalog.data.JsonSerDe not found
   2019-09-25 22:11:42.854 - stderr> java.lang.ClassNotFoundException: Class org.apache.hive.hcatalog.data.JsonSerDe not found
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2101)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:84)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:77)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:289)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:271)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:663)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:646)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:898)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:937)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$createTable$1(HiveClientImpl.scala:539)
   2019-09-25 22:11:42.854 - stderr>    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:311)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:245)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:244)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:294)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.client.HiveClientImpl.createTable(HiveClientImpl.scala:537)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$createTable$1(HiveExternalCatalog.scala:284)
   2019-09-25 22:11:42.854 - stderr>    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.hive.HiveExternalCatalog.createTable(HiveExternalCatalog.scala:242)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.createTable(ExternalCatalogWithListener.scala:94)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:325)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.execution.command.CreateTableCommand.run(tables.scala:132)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
   2019-09-25 22:11:42.854 - stderr>    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:225)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3372)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3368)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:225)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:87)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:550)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.hive.SparkSubmitClassLoaderTest$.main(HiveSparkSubmitSuite.scala:638)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.sql.hive.SparkSubmitClassLoaderTest.main(HiveSparkSubmitSuite.scala)
   2019-09-25 22:11:42.855 - stderr>    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   2019-09-25 22:11:42.855 - stderr>    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
   2019-09-25 22:11:42.855 - stderr>    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   2019-09-25 22:11:42.855 - stderr>    at java.lang.reflect.Method.invoke(Method.java:498)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:901)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:179)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:202)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:89)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:980)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:989)
   2019-09-25 22:11:42.855 - stderr>    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
   ```
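   The trace shows the failure happens inside the isolated Hive client: `org.apache.hive.hcatalog.data.JsonSerDe` lives in `hive-hcatalog-core-2.1.1.jar`, which was passed via `--jars`, yet the classloader built by `IsolatedClientLoader` cannot see it. As a minimal sketch of the kind of change this implies (illustrative only, not the actual patch; `buildIsolatedLoader`, `metastoreJars`, and `userJars` are made-up names), the isolated loader's classpath would need to include the user-supplied jars as well:
   ```scala
   import java.net.{URL, URLClassLoader}

   // Illustrative sketch only: construct the isolated client's classloader from
   // the downloaded Hive/metastore jars *plus* the jars supplied via --jars, so
   // classes such as org.apache.hive.hcatalog.data.JsonSerDe become resolvable.
   def buildIsolatedLoader(
       metastoreJars: Seq[URL],  // jars resolved for spark.sql.hive.metastore.jars=maven
       userJars: Seq[URL],       // jars from --jars / spark.jars (previously left out)
       parent: ClassLoader): ClassLoader = {
     new URLClassLoader((metastoreJars ++ userJars).toArray, parent)
   }
   ```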
   
   ### Why are the changes needed?
   Fix a bug.
   
   
   ### Does this PR introduce any user-facing change?
   No
   
   
   ### How was this patch tested?
   Added a unit test (the reproduction above).
   
