danny0405 commented on issue #3680:
URL: https://github.com/apache/hudi/issues/3680#issuecomment-922247348


   > > Hi, here is the doc on how you can sync to Hive: 
https://www.yuque.com/docs/share/01c98494-a980-414c-9c45-152023bf3c17?#IsoNU
   > > 
   > > 1. You may need to modify the hive dependency version as what you are 
using
   > > 2. You need to add the hive profile
   > 
   > Hi, I have done what you said above, but another error appeared:
   > 
   > _2021-09-18 16:23:22,723 INFO 
org.apache.flink.runtime.resourcemanager.slotmanager.DeclarativeSlotManager [] 
- Clearing resource requirements of job e211f6a0afcc69363a6b9915b367a3d5 
2021-09-18 16:23:22,722 INFO 
org.apache.flink.runtime.executiongraph.ExecutionGraph [] - Job 
insert-into_default_catalog.default_database.t10 
(e211f6a0afcc69363a6b9915b367a3d5) switched from state FAILING to FAILED. 
org.apache.flink.runtime.JobException: Recovery is suppressed by 
NoRestartBackoffTimeStrategy at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:207)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at org.apache.flink.
 
runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:197)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:188)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:677)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:435)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
sun.reflect.GeneratedMethodAccessor16.invoke(Unknown Source) ~[?:?] at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_144] at java.lang.reflect.Method.invoke(Method.java:498) 
~[?:1.8.0_144] at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcAct
 or.java:305) ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:212)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:77)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:158)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
scala.PartialFunction.applyOrElse(PartialFunction.scala:123) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
scala.PartialFunction$OrElse.applyOrElse(Part
 ialFunction.scala:171) [flink-dist_2.12-1.13.1.jar:1.13.1] at 
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.actor.Actor.aroundReceive(Actor.scala:517) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.actor.Actor.aroundReceive$(Actor.scala:515) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.actor.ActorCell.receiveMessage(ActorCell.scala:592) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.actor.ActorCell.invoke(ActorCell.scala:561) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.Mailbox.run(Mailbox.scala:225) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.Mailbox.exec(Mailbox.scala:235) [flink-dist_2.12-1.13.1.jar:1.13.1
 ] at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) 
[flink-dist_2.12-1.13.1.jar:1.13.1] at 
akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) 
[flink-dist_2.12-1.13.1.jar:1.13.1] Caused by: java.lang.ClassCastException: 
org.apache.hudi.common.fs.HoodieWrapperFileSystem cannot be cast to 
org.apache.hudi.common.fs.HoodieWrapperFileSystem at 
org.apache.hudi.io.storage.HoodieParquetWriter.(HoodieParquetWriter.java:63) 
~[?:?] at 
org.apache.hudi.io.storage.HoodieFileWriterFactory.newParquetFileWriter(HoodieFileWriterFactory.java:76)
 ~[?:?] at 
org.apache.hudi.io.storage.HoodieFileWriterFactory.newParquetFileWriter(HoodieFileWriterFactory.java:63)
 ~[?:?] at org.apache.hudi.io.storage.HoodieFileWriterFactory.getFileWriter(Ho
 odieFileWriterFactory.java:49) ~[?:?] at 
org.apache.hudi.io.HoodieCreateHandle.(HoodieCreateHandle.java:101) ~[?:?] at 
org.apache.hudi.io.HoodieCreateHandle.(HoodieCreateHandle.java:81) ~[?:?] at 
org.apache.hudi.io.FlinkCreateHandle.(FlinkCreateHandle.java:67) ~[?:?] at 
org.apache.hudi.io.FlinkCreateHandle.(FlinkCreateHandle.java:60) ~[?:?] at 
org.apache.hudi.client.HoodieFlinkWriteClient.getOrCreateWriteHandle(HoodieFlinkWriteClient.java:470)
 ~[?:?] at 
org.apache.hudi.client.HoodieFlinkWriteClient.upsert(HoodieFlinkWriteClient.java:149)
 ~[?:?] at 
org.apache.hudi.sink.StreamWriteFunction.lambda$initWriteFunction$1(StreamWriteFunction.java:184)
 ~[?:?] at 
org.apache.hudi.sink.StreamWriteFunction.lambda$flushRemaining$7(StreamWriteFunction.java:460)
 ~[?:?] at java.util.LinkedHashMap$LinkedValues.forEach(LinkedHashMap.java:608) 
~[?:1.8.0_144] at 
org.apache.hudi.sink.StreamWriteFunction.flushRemaining(StreamWriteFunction.java:453)
 ~[?:?] at org.apache.hudi.sink.StreamWriteFunction.endInp
 ut(StreamWriteFunction.java:150) ~[?:?] at 
org.apache.hudi.sink.common.AbstractWriteOperator.endInput(AbstractWriteOperator.java:48)
 ~[?:?] at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.endOperatorInput(StreamOperatorWrapper.java:91)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.OperatorChain.endInput(OperatorChain.java:423)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:69)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:423)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:204)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:681)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at org.apache.flink.streamin
 g.runtime.tasks.StreamTask.executeInvoke(StreamTask.java:636) 
~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runWithCleanUpOnFail(StreamTask.java:647)
 ~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:620) 
~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:779) 
~[flink-dist_2.12-1.13.1.jar:1.13.1] at 
org.apache.flink.runtime.taskmanager.Task.run(Task.java:566) 
~[flink-dist_2.12-1.13.1.jar:1.13.1] at java.lang.Thread.run(Thread.java:748) 
~[?:1.8.0_144] 2021-09-18 16:23:22,729 INFO 
org.apache.flink.runtime.checkpoint.CheckpointCoordinator [] - Stopping 
checkpoint coordinator for job e211f6a0afcc69363a6b9915b367a3d5._
   
   This is a classloader problem. Which version are you using? You may need to 
copy the jar into Flink's `lib` directory. The problem has been fixed in the 
master branch and in the 0.9.0 cherry-pick branch.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to