[ https://issues.apache.org/jira/browse/HUDI-9069?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17929299#comment-17929299 ]
Y Ethan Guo commented on HUDI-9069:
-----------------------------------
{code:java}
25/02/21 19:30:45 WARN RetryHelper: Catch Exception for N/A, will retry after 1000 ms.
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientProxy.lambda$invoke$ac328414$1(HoodieMetaserverClientProxy.java:56)
at org.apache.hudi.common.util.RetryHelper.start(RetryHelper.java:94)
at org.apache.hudi.common.util.RetryHelper.start(RetryHelper.java:122)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientProxy.invoke(HoodieMetaserverClientProxy.java:56)
at com.sun.proxy.$Proxy31.getTable(Unknown Source)
at org.apache.hudi.common.table.HoodieTableMetaserverClient.initOrGetTable(HoodieTableMetaserverClient.java:84)
at org.apache.hudi.common.table.HoodieTableMetaserverClient.<init>(HoodieTableMetaserverClient.java:73)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hudi.common.util.ReflectionUtils.loadClass(ReflectionUtils.java:73)
at org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:824)
at org.apache.hudi.common.table.HoodieTableMetaClient.access$100(HoodieTableMetaClient.java:106)
at org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:938)
at org.apache.hudi.client.BaseHoodieClient.createMetaClient(BaseHoodieClient.java:189)
at org.apache.hudi.client.BaseHoodieWriteClient.startCommitWithTime(BaseHoodieWriteClient.java:950)
at org.apache.hudi.HoodieSparkSqlWriterInternal.writeInternal(HoodieSparkSqlWriter.scala:514)
at org.apache.hudi.HoodieSparkSqlWriterInternal.$anonfun$write$1(HoodieSparkSqlWriter.scala:192)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
at org.apache.spark.sql.adapter.BaseSpark3Adapter.sqlExecutionWithNewExecutionId(BaseSpark3Adapter.scala:107)
at org.apache.hudi.HoodieSparkSqlWriterInternal.write(HoodieSparkSqlWriter.scala:214)
at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:129)
at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:171)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:48)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:107)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:107)
at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:461)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:76)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:461)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:437)
at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:98)
at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:85)
at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:83)
at org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:142)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:869)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:391)
at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:364)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:59)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:63)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:65)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:67)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:69)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:71)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:73)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:75)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:77)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:79)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:81)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:83)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:85)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:87)
at $line33.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:89)
at $line33.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:91)
at $line33.$read$$iw$$iw$$iw$$iw.<init>(<console>:93)
at $line33.$read$$iw$$iw$$iw.<init>(<console>:95)
at $line33.$read$$iw$$iw.<init>(<console>:97)
at $line33.$read$$iw.<init>(<console>:99)
at $line33.$read.<init>(<console>:101)
at $line33.$read$.<init>(<console>:105)
at $line33.$read$.<clinit>(<console>)
at $line33.$eval$.$print$lzycompute(<console>:7)
at $line33.$eval$.$print(<console>:6)
at $line33.$eval.$print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:747)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1020)
at scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:568)
at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:36)
at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:116)
at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)
at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:567)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:594)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:564)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:865)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:883)
at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:733)
at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:435)
at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:456)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:239)
at org.apache.spark.repl.Main$.doMain(Main.scala:78)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1029)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:194)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:217)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1120)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1129)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: org.apache.hudi.exception.HoodieException: NoSuchObjectException(message:default.trips1 does not exist)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientImp.lambda$exceptionWrapper$8(HoodieMetaserverClientImp.java:189)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientImp.getTable(HoodieMetaserverClientImp.java:100)
... 133 more
Caused by: NoSuchObjectException(message:default.trips1 does not exist)
at org.apache.hudi.metaserver.thrift.ThriftHoodieMetaserver$getTable_result$getTable_resultStandardScheme.read(ThriftHoodieMetaserver.java:4651)
at org.apache.hudi.metaserver.thrift.ThriftHoodieMetaserver$getTable_result$getTable_resultStandardScheme.read(ThriftHoodieMetaserver.java:4619)
at org.apache.hudi.metaserver.thrift.ThriftHoodieMetaserver$getTable_result.read(ThriftHoodieMetaserver.java:4550)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:88)
at org.apache.hudi.metaserver.thrift.ThriftHoodieMetaserver$Client.recv_getTable(ThriftHoodieMetaserver.java:155)
at org.apache.hudi.metaserver.thrift.ThriftHoodieMetaserver$Client.getTable(ThriftHoodieMetaserver.java:141)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientImp.lambda$getTable$0(HoodieMetaserverClientImp.java:100)
at org.apache.hudi.metaserver.client.HoodieMetaserverClientImp.lambda$exceptionWrapper$8(HoodieMetaserverClientImp.java:187)
... 134 more
25/02/21 19:30:46 ERROR RetryHelper: Still failed to N/A after retried 3 times.
java.lang.reflect.InvocationTargetException
[stack trace and "Caused by" chain identical to the WARN above, ending in:]
Caused by: NoSuchObjectException(message:default.trips1 does not exist)
... 134 more
25/02/21 19:30:52 WARN RequestHandler: Bad request response due to client view behind server view. Last known instant from client was 20250221193043106 but server has the following timeline []
25/02/21 19:30:52 WARN PriorityBasedFileSystemView: Got error running preferred function. Likely due to another concurrent writer in progress. Trying secondary
25/02/21 19:30:52 WARN PriorityBasedFileSystemView: Routing request to secondary file-system view
{code}
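
Note that retrying cannot help here: the metaserver deterministically reports that default.trips1 does not exist, so every attempt fails the same way. Illustratively (a sketch only, not Hudi's actual RetryHelper source), the retry wrapper behaves roughly as below, which is why the log shows one WARN per attempt followed by a final ERROR rethrowing the same exception:
{code:java}
// Sketch of a retry loop of this shape (illustrative, not Hudi's RetryHelper).
// A deterministic failure is retried `attempts` times and then rethrown.
def retry[T](attempts: Int, delayMs: Long)(op: () => T): T = {
  var i = 0
  var last: Throwable = null
  while (i < attempts) {
    try {
      return op() // succeeds only if the underlying call succeeds
    } catch {
      case t: Throwable =>
        println(s"WARN: caught exception on attempt ${i + 1}, will retry after $delayMs ms")
        last = t
        Thread.sleep(delayMs)
    }
    i += 1
  }
  println(s"ERROR: still failed after $attempts retries")
  throw last
}

// Hypothetical usage: retry(3, 1000)(() => client.getTable("default", "trips1"))
{code}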
> meta server does not work
> -------------------------
>
> Key: HUDI-9069
> URL: https://issues.apache.org/jira/browse/HUDI-9069
> Project: Apache Hudi
> Issue Type: Bug
> Reporter: Davis Zhang
> Priority: Major
> Fix For: 1.1.0
>
>
> {code:java}
> diff --git a/packaging/bundle-validation/service/read.scala b/packaging/bundle-validation/service/read.scala
> index b9780ffa576..caa6ee2e689 100644
> --- a/packaging/bundle-validation/service/read.scala
> +++ b/packaging/bundle-validation/service/read.scala
> @@ -22,7 +22,5 @@ spark.read.format("hudi").
>    option("hoodie.table.name", tableName).
>    option("hoodie.database.name", "default").
>    option("hoodie.metadata.enable", "false").
> -  option("hoodie.metaserver.enabled", "true").
> -  option("hoodie.metaserver.uris", "thrift://localhost:9090").
>    load(basePath).coalesce(1).write.csv("/tmp/metaserver-bundle/sparkdatasource/trips/results")
>  System.exit(0)
> diff --git a/packaging/bundle-validation/service/write.scala b/packaging/bundle-validation/service/write.scala
> index 86fd759d923..378e7381f18 100644
> --- a/packaging/bundle-validation/service/write.scala
> +++ b/packaging/bundle-validation/service/write.scala
> @@ -41,8 +41,6 @@ df.write.format("hudi").
>    option("hoodie.datasource.meta.sync.enable", "false").
>    option("hoodie.datasource.hive_sync.enable", "false").
>    option("hoodie.metadata.enable", "false").
> -  option("hoodie.metaserver.enabled", "true").
> -  option("hoodie.metaserver.uris", "thrift://localhost:9090").
>    mode(Overwrite).
>    save(basePath)
> {code}
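> For reference, the metaserver-enabled write that this diff disables looks
> roughly like the sketch below, reconstructed only from the option keys visible
> in the diff context (df, basePath, and the Overwrite import are defined earlier
> in write.scala and are not shown here):
> {code:java}
> // Reconstructed sketch of the failing, metaserver-enabled write path.
> // df and basePath come from the surrounding write.scala; Overwrite is
> // org.apache.spark.sql.SaveMode.Overwrite imported in the original script.
> df.write.format("hudi").
>   option("hoodie.datasource.meta.sync.enable", "false").
>   option("hoodie.datasource.hive_sync.enable", "false").
>   option("hoodie.metadata.enable", "false").
>   option("hoodie.metaserver.enabled", "true").
>   option("hoodie.metaserver.uris", "thrift://localhost:9090").
>   mode(Overwrite).
>   save(basePath)
> {code}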
>
> The write in write.scala does not succeed, so the timeline on the FS is empty.
> When read.scala runs, it is handed a completed instant whose completion time is
> null, which does not make sense.
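> A quick way to confirm the empty timeline is to list the table's .hoodie
> directory from the same spark-shell (a sketch, assuming basePath is still bound
> to the table path used by the scripts above):
> {code:java}
> import org.apache.hadoop.fs.Path
> // Print whatever instant files exist under the table's .hoodie directory.
> val fs = new Path(basePath).getFileSystem(spark.sparkContext.hadoopConfiguration)
> fs.listStatus(new Path(basePath, ".hoodie")).foreach(s => println(s.getPath.getName))
> {code}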
>
> {code:java}
> scala> spark.read.format("hudi").
>      | option("hoodie.table.name", tableName).
>      | option("hoodie.database.name", "default").
>      | option("hoodie.metadata.enable", "false").
>      | option("hoodie.metaserver.enabled", "true").
>      | option("hoodie.metaserver.uris", "thrift://localhost:9090").
>      | load(basePath).coalesce(1).write.csv("/tmp/metaserver-bundle/sparkdatasource/trips/results")
> java.lang.IllegalArgumentException: Completion time should not be empty
> at org.apache.hudi.common.util.ValidationUtils.checkArgument(ValidationUtils.java:42)
> at org.apache.hudi.common.table.timeline.versioning.v2.InstantFileNameGeneratorV2.getCompleteFileName(InstantFileNameGeneratorV2.java:286)
> at org.apache.hudi.common.table.timeline.versioning.v2.InstantFileNameGeneratorV2.getFileName(InstantFileNameGeneratorV2.java:318)
> at org.apache.hudi.common.table.timeline.versioning.v2.ActiveTimelineV2.getContentStream(ActiveTimelineV2.java:271)
> at org.apache.hudi.common.table.timeline.BaseHoodieTimeline.getInstantContentStream(BaseHoodieTimeline.java:558)
> at org.apache.hudi.common.table.timeline.versioning.v2.ActiveTimelineV2.lambda$getCommitMetadataStream$5(ActiveTimelineV2.java:309)
> at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> at java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:361)
> at java.util.stream.AbstractPipeline.copyIntoWithCancel(AbstractPipeline.java:500)
> at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:486)
> at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
> at java.util.stream.FindOps$FindOp.evaluateSequential(FindOps.java:152)
> at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
> at java.util.stream.ReferencePipeline.findFirst(ReferencePipeline.java:531)
> at org.apache.hudi.common.table.timeline.versioning.v2.ActiveTimelineV2.getLastCommitMetadataWithValidSchema(ActiveTimelineV2.java:287)
> at org.apache.hudi.common.table.TableSchemaResolver.getLatestCommitMetadataWithValidSchema(TableSchemaResolver.java:487)
> at org.apache.hudi.common.table.TableSchemaResolver.getTableSchemaFromLatestCommitMetadata(TableSchemaResolver.java:229)
> at org.apache.hudi.common.table.TableSchemaResolver.getTableAvroSchemaInternal(TableSchemaResolver.java:191)
> at org.apache.hudi.common.table.TableSchemaResolver.getTableAvroSchema(TableSchemaResolver.java:145)
> at org.apache.hudi.common.table.TableSchemaResolver.getTableAvroSchema(TableSchemaResolver.java:134)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.$anonfun$x$2$10(HoodieHadoopFsRelationFactory.scala:159)
> at scala.util.Try$.apply(Try.scala:213)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.$anonfun$x$2$9(HoodieHadoopFsRelationFactory.scala:159)
> at scala.Option.getOrElse(Option.scala:189)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.x$2$lzycompute(HoodieHadoopFsRelationFactory.scala:159)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.x$2(HoodieHadoopFsRelationFactory.scala:136)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.tableAvroSchema$lzycompute(HoodieHadoopFsRelationFactory.scala:136)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.tableAvroSchema(HoodieHadoopFsRelationFactory.scala:136)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.tableStructSchema$lzycompute(HoodieHadoopFsRelationFactory.scala:176)
> at org.apache.hudi.HoodieBaseHadoopFsRelationFactory.tableStructSchema(HoodieHadoopFsRelationFactory.scala:175)
> at org.apache.hudi.HoodieMergeOnReadSnapshotHadoopFsRelationFactory.<init>(HoodieHadoopFsRelationFactory.scala:263)
> at org.apache.hudi.HoodieCopyOnWriteSnapshotHadoopFsRelationFactory.<init>(HoodieHadoopFsRelationFactory.scala:355)
> at org.apache.hudi.DefaultSource$.createRelation(DefaultSource.scala:332)
> at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:144)
> at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:82)
> at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:346)
> at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:229)
> at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:211)
> at scala.Option.getOrElse(Option.scala:189)
> at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:211)
> at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:186)
> ... 65 elided
> {code}
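> The check that throws above is a plain argument validation. Under the v2
> timeline layout, a completed instant's file name embeds its completion time, so
> an instant reported as completed but carrying a null completion time cannot even
> be named. An illustrative shape of the invariant (a sketch, not the exact Hudi
> source):
> {code:java}
> // Sketch of the invariant behind the IllegalArgumentException above:
> // naming a completed instant's file requires a non-empty completion time.
> def completeFileName(requestedTime: String, completionTime: String, action: String): String = {
>   require(completionTime != null && completionTime.nonEmpty,
>     "Completion time should not be empty") // throws IllegalArgumentException
>   s"${requestedTime}_${completionTime}.${action}"
> }
> {code}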