danny0405 commented on issue #3657:
URL: https://github.com/apache/hudi/issues/3657#issuecomment-920487323


   > and if I use flink 1.13.1 and hudi master, there are another ERROR:
   > org.apache.flink.runtime.JobException: Recovery is suppressed by 
NoRestartBackoffTimeStrategy
   > at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
   > at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
   > at 
org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:207)
   > at 
org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:197)
   > at 
org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:188)
   > at 
org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:677)
   > at 
org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
   > at 
org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:435)
   > at sun.reflect.GeneratedMethodAccessor23.invoke(Unknown Source)
   > at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   > at java.lang.reflect.Method.invoke(Method.java:498)
   > at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:305)
   > at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:212)
   > at 
org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:77)
   > at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:158)
   > at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
   > at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
   > at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
   > at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
   > at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
   > at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
   > at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
   > at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
   > at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
   > at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
   > at akka.actor.ActorCell.invoke(ActorCell.scala:561)
   > at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
   > at akka.dispatch.Mailbox.run(Mailbox.scala:225)
   > at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
   > at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
   > at 
akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
   > at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
   > at 
akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
   > Caused by: java.lang.IllegalArgumentException: hoodie.properties file 
seems invalid. Please check for left over `.updated` files if any, manually 
copy it to hoodie.properties and retry
   > at 
org.apache.hudi.common.util.ValidationUtils.checkArgument(ValidationUtils.java:40)
   > at 
org.apache.hudi.common.table.HoodieTableConfig.&lt;init&gt;(HoodieTableConfig.java:184)
   > at 
org.apache.hudi.common.table.HoodieTableMetaClient.&lt;init&gt;(HoodieTableMetaClient.java:114)
   > at 
org.apache.hudi.common.table.HoodieTableMetaClient.&lt;init&gt;(HoodieTableMetaClient.java:74)
   > at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:607)
   > at org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:48)
   > at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.&lt;init&gt;(WriteProfile.java:118)
   > at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.&lt;init&gt;(DeltaWriteProfile.java:43)
   > at 
org.apache.hudi.sink.partitioner.profile.WriteProfiles.getWriteProfile(WriteProfiles.java:75)
   > at 
org.apache.hudi.sink.partitioner.profile.WriteProfiles.lambda$singleton$0(WriteProfiles.java:64)
   > at java.util.HashMap.computeIfAbsent(HashMap.java:1127)
   > at 
org.apache.hudi.sink.partitioner.profile.WriteProfiles.singleton(WriteProfiles.java:63)
   > at 
org.apache.hudi.sink.partitioner.BucketAssigners.create(BucketAssigners.java:56)
   > at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.open(BucketAssignFunction.java:123)
   > at 
org.apache.flink.api.common.functions.util.FunctionUtils.openFunction(FunctionUtils.java:34)
   > at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.open(AbstractUdfStreamOperator.java:102)
   > at 
org.apache.flink.streaming.api.operators.KeyedProcessOperator.open(KeyedProcessOperator.java:55)
   > at 
org.apache.hudi.sink.partitioner.BucketAssignOperator.open(BucketAssignOperator.java:41)
   > at 
org.apache.flink.streaming.runtime.tasks.OperatorChain.initializeStateAndOpenOperators(OperatorChain.java:437)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.restoreGates(StreamTask.java:582)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.call(StreamTaskActionExecutor.java:55)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.executeRestore(StreamTask.java:562)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runWithCleanUpOnFail(StreamTask.java:647)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.restore(StreamTask.java:537)
   > at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:759)
   > at org.apache.flink.runtime.taskmanager.Task.run(Task.java:566)
   > at java.lang.Thread.run(Thread.java:748)
   > Suppressed: java.lang.NullPointerException
   > at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.close(BucketAssignFunction.java:244)
   > at 
org.apache.flink.api.common.functions.util.FunctionUtils.closeFunction(FunctionUtils.java:41)
   > at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.dispose(AbstractUdfStreamOperator.java:117)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.disposeAllOperators(StreamTask.java:861)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runAndSuppressThrowable(StreamTask.java:840)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.cleanUpInvoke(StreamTask.java:753)
   > at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runWithCleanUpOnFail(StreamTask.java:659)
   > ... 4 more
   
   Looks like you declared the wrong table type; try cleaning the .hoodie 
directory and running the job again.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to