[ https://issues.apache.org/jira/browse/HUDI-3400?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

shibei updated HUDI-3400:
-------------------------
    Description: 
Currently, when creating a hudi table, an `IllegalArgumentException` is thrown 
because the table has no data files yet. The exception has no effect on the 
result, but it pollutes the logs with the warning below; this issue will fix it 
(a minimal illustrative sketch follows the stack trace).
{code:java}
[ScalaTest-run-running-TestCreateTable] WARN  org.apache.hudi.common.table.TableSchemaResolver  - Failed to read operation field from avro schema
java.lang.IllegalArgumentException: Could not find any data file written for commit, so could not get schema for table file:/private/var/folders/61/5b90xn_s6bv4hrq838cmt2_00000gn/T/spark-1d0fa3f9-b039-4a12-bb7b-6e17322dcc7c/hudi_database.db/h15
    at org.apache.hudi.common.table.TableSchemaResolver.getTableParquetSchemaFromDataFile(TableSchemaResolver.java:88)
    at org.apache.hudi.common.table.TableSchemaResolver.getTableAvroSchemaFromDataFile(TableSchemaResolver.java:119)
    at org.apache.hudi.common.table.TableSchemaResolver.hasOperationField(TableSchemaResolver.java:480)
    at org.apache.hudi.common.table.TableSchemaResolver.<init>(TableSchemaResolver.java:65)
    at org.apache.spark.sql.hudi.HoodieSqlCommonUtils$.getTableSqlSchema(HoodieSqlCommonUtils.scala:84)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableSchema$lzycompute(HoodieCatalogTable.scala:121)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableSchema(HoodieCatalogTable.scala:119)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.partitionSchema$lzycompute(HoodieCatalogTable.scala:154)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.partitionSchema(HoodieCatalogTable.scala:154)
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand$.createTableInCatalog(CreateHoodieTableCommand.scala:133)
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand.run(CreateHoodieTableCommand.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194)
    at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194)
    at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3370)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3369)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:194)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:79)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
    at org.apache.spark.sql.hudi.TestCreateTable$$anonfun$10.apply(TestCreateTable.scala:602)
    at org.apache.spark.sql.hudi.TestCreateTable$$anonfun$10.apply(TestCreateTable.scala:600)
    at org.apache.spark.sql.hudi.TestHoodieSqlBase$$anonfun$test$1.apply(TestHoodieSqlBase.scala:66)
    at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
    at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    at org.scalatest.Transformer.apply(Transformer.scala:22)
    at org.scalatest.Transformer.apply(Transformer.scala:20)
    at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
    at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196)
    at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560)
    at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
    at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
    at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
    at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
    at org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
    at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
    at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
    at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
    at org.scalatest.Suite$class.run(Suite.scala:1147)
    at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
    at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
    at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
    at org.apache.spark.sql.hudi.TestHoodieSqlBase.org$scalatest$BeforeAndAfterAll$$super$run(TestHoodieSqlBase.scala:32)
    at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
    at org.apache.spark.sql.hudi.TestHoodieSqlBase.run(TestHoodieSqlBase.scala:32)
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
    at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
    at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
    at org.scalatest.tools.Runner$.run(Runner.scala:850)
    at org.scalatest.tools.Runner.run(Runner.scala)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2or3(ScalaTestRunner.java:38)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:25)
 {code}
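
For context, the fix direction can be illustrated with a minimal, purely hypothetical sketch. The helper names below are illustrative stand-ins, not the actual Hudi API: the idea is that when no completed commit has written a data file yet, the schema lookup can simply report "no schema" instead of letting the `IllegalArgumentException` propagate up through `CreateHoodieTableCommand` and get logged.
{code:java}
// Hypothetical sketch only -- not the actual TableSchemaResolver code.
// The schema is represented as a plain String here to keep the example
// self-contained; in Hudi it would be an Avro schema.
import java.util.Optional;

public class SchemaLookupSketch {

  /** Stand-in for a lookup that reads the schema from the latest data file and
   *  throws IllegalArgumentException when no data file has been written yet. */
  interface DataFileSchemaReader {
    String readSchemaFromLatestDataFile();
  }

  /** Guarded lookup: a freshly created table yields an empty result instead of
   *  propagating (and logging) the exception. */
  static Optional<String> tryReadSchema(DataFileSchemaReader reader) {
    try {
      return Optional.of(reader.readSchemaFromLatestDataFile());
    } catch (IllegalArgumentException e) {
      // Expected state for a table with no completed commit that wrote data
      // files; treat it as "no schema available" rather than an error.
      return Optional.empty();
    }
  }
}
{code}
The actual change in the linked pull request may look different; the point is only that an empty table is an expected state during CREATE TABLE, not an error worth surfacing.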

  was:
Now, when creating a hudi table, an IllegalArgumentException is thrown because 
there is no data file, but it has no effect on the result; this issue will fix it.


> Avoid throw exception when create hudi table
> --------------------------------------------
>
>                 Key: HUDI-3400
>                 URL: https://issues.apache.org/jira/browse/HUDI-3400
>             Project: Apache Hudi
>          Issue Type: Improvement
>            Reporter: shibei
>            Assignee: shibei
>            Priority: Major
>              Labels: pull-request-available
>
> Currently, when creating a hudi table, an `IllegalArgumentException` is thrown 
> because the table has no data files yet. The exception has no effect on the 
> result; this issue will fix it.



--
This message was sent by Atlassian Jira
(v8.20.1#820001)
