bksrepo commented on issue #10609:
URL: https://github.com/apache/hudi/issues/10609#issuecomment-1988119816

   Is there any conclusion on this issue? I am facing the same issue too.
   
   10:29:32.481 [qtp264384338-719] ERROR 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader - Got exception 
when reading log file
   java.lang.ClassCastException: 
org.apache.hudi.avro.model.HoodieDeleteRecordList cannot be cast to 
org.apache.hudi.avro.model.HoodieDeleteRecordList
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.deserialize(HoodieDeleteBlock.java:160)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.getRecordsToDelete(HoodieDeleteBlock.java:115)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.processQueuedBlocksForInstant(AbstractHoodieLogRecordReader.java:828)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:403)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:117)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:76)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:498)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:429)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$getOrCreateReaders$10(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
 ~[?:1.8.0_202]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:291)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:255)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:145)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataFileSystemView.getAllPartitionPaths(HoodieMetadataFileSystemView.java:72)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensureAllPartitionsLoadedCorrectly(AbstractTableFileSystemView.java:314)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.loadAllPartitions(AbstractTableFileSystemView.java:752)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.handlers.FileSliceHandler.loadAllPartitions(FileSliceHandler.java:152)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.RequestHandler.lambda$registerFileSlicesAPI$55(RequestHandler.java:422)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.RequestHandler$ViewHandler.handle(RequestHandler.java:550)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.core.security.SecurityUtil.noopAccessManager(SecurityUtil.kt:20) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet.addHandler$lambda-0(JavalinServlet.kt:96) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet$lifecycle$2$1$1.invoke(JavalinServlet.kt:43) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet$lifecycle$2$1$1.invoke(JavalinServlet.kt:43) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServletHandler.executeNextTask(JavalinServletHandler.kt:99)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$lambda-1(JavalinServletHandler.kt:85)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
java.util.concurrent.CompletableFuture.uniComposeStage(CompletableFuture.java:981)
 ~[?:1.8.0_202]
        at 
java.util.concurrent.CompletableFuture.thenCompose(CompletableFuture.java:2124) 
~[?:1.8.0_202]
        at 
io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$javalin(JavalinServletHandler.kt:85)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at io.javalin.http.JavalinServlet.service(JavalinServlet.kt:89) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.javax.servlet.http.HttpServlet.service(HttpServlet.java:790) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.jetty.JavalinJettyServlet.service(JavalinJettyServlet.kt:58) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.javax.servlet.http.HttpServlet.service(HttpServlet.java:790) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.jetty.JettyServer$start$wsAndHttpHandler$1.doHandle(JettyServer.kt:52)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1355)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.Server.handle(Server.java:516) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.dispatch(HttpChannel.java:732)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.handle(HttpChannel.java:479)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.FillInterest.fillable(FillInterest.java:105)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_202]
   10:29:32.502 [qtp264384338-719] ERROR 
org.apache.hudi.timeline.service.RequestHandler - Got runtime exception 
servicing request 
basepath=%2Fdatalake%2Fgayatri%2Fetl%2Fgayatric_apps%2Fnp%2Fcow&lastinstantts=20240311102853097&timelinehash=8c8ffae8e420e2a334b4a49aedf826d91fac30bacd5267b39d44390ddc866eda
   org.apache.hudi.exception.HoodieMetadataException: Failed to retrieve list 
of partition from metadata
        at 
org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:127)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataFileSystemView.getAllPartitionPaths(HoodieMetadataFileSystemView.java:72)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensureAllPartitionsLoadedCorrectly(AbstractTableFileSystemView.java:314)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.loadAllPartitions(AbstractTableFileSystemView.java:752)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.handlers.FileSliceHandler.loadAllPartitions(FileSliceHandler.java:152)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.RequestHandler.lambda$registerFileSlicesAPI$55(RequestHandler.java:422)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.timeline.service.RequestHandler$ViewHandler.handle(RequestHandler.java:550)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.core.security.SecurityUtil.noopAccessManager(SecurityUtil.kt:20) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet.addHandler$lambda-0(JavalinServlet.kt:96) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet$lifecycle$2$1$1.invoke(JavalinServlet.kt:43) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServlet$lifecycle$2$1$1.invoke(JavalinServlet.kt:43) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServletHandler.executeNextTask(JavalinServletHandler.kt:99)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$lambda-1(JavalinServletHandler.kt:85)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
java.util.concurrent.CompletableFuture.uniComposeStage(CompletableFuture.java:981)
 ~[?:1.8.0_202]
        at 
java.util.concurrent.CompletableFuture.thenCompose(CompletableFuture.java:2124) 
~[?:1.8.0_202]
        at 
io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$javalin(JavalinServletHandler.kt:85)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at io.javalin.http.JavalinServlet.service(JavalinServlet.kt:89) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.javax.servlet.http.HttpServlet.service(HttpServlet.java:790) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.jetty.JavalinJettyServlet.service(JavalinJettyServlet.kt:58) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.javax.servlet.http.HttpServlet.service(HttpServlet.java:790) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
io.javalin.jetty.JettyServer$start$wsAndHttpHandler$1.doHandle(JettyServer.kt:52)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1355)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.Server.handle(Server.java:516) 
~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.dispatch(HttpChannel.java:732)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpChannel.handle(HttpChannel.java:479)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.FillInterest.fillable(FillInterest.java:105)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_202]
   Caused by: org.apache.hudi.exception.HoodieException: Exception when reading 
log file 
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:414)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:117)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:76)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:498)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:429)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$getOrCreateReaders$10(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
 ~[?:1.8.0_202]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:291)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:255)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:145)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        ... 48 more
   Caused by: java.lang.ClassCastException: 
org.apache.hudi.avro.model.HoodieDeleteRecordList cannot be cast to 
org.apache.hudi.avro.model.HoodieDeleteRecordList
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.deserialize(HoodieDeleteBlock.java:160)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.getRecordsToDelete(HoodieDeleteBlock.java:115)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.processQueuedBlocksForInstant(AbstractHoodieLogRecordReader.java:828)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:403)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:117)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:76)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:498)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:429)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$getOrCreateReaders$10(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
 ~[?:1.8.0_202]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:412)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:291)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:255)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:145)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125)
 ~[org.apache.hudi_hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        ... 48 more
   10:29:32.509 [Thread-6] ERROR 
org.apache.hudi.common.table.view.PriorityBasedFileSystemView - Got error 
running preferred function. Trying secondary
   org.apache.hudi.exception.HoodieRemoteException: Server Error
        at 
org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:499)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.PriorityBasedFileSystemView.execute(PriorityBasedFileSystemView.java:69)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.PriorityBasedFileSystemView.loadAllPartitions(PriorityBasedFileSystemView.java:172)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanner.<init>(CleanPlanner.java:110) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:105)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:151)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.execute(CleanPlanActionExecutor.java:177)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.HoodieSparkCopyOnWriteTable.scheduleCleaning(HoodieSparkCopyOnWriteTable.java:217)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieTableServiceClient.scheduleTableServiceInternal(BaseHoodieTableServiceClient.java:628)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieTableServiceClient.clean(BaseHoodieTableServiceClient.java:751)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:861)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:834)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:865)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.autoCleanOnCommit(BaseHoodieWriteClient.java:599)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.mayBeCleanAndArchive(BaseHoodieWriteClient.java:578)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:248)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:211)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.internal.DataSourceInternalWriterHelper.commit(DataSourceInternalWriterHelper.java:89)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.spark3.internal.HoodieDataSourceInternalBatchWrite.commit(HoodieDataSourceInternalBatchWrite.java:92)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:422)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:382)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.writeWithV2(WriteToDataSourceV2Exec.scala:248)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run(WriteToDataSourceV2Exec.scala:360)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run$(WriteToDataSourceV2Exec.scala:359)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.run(WriteToDataSourceV2Exec.scala:248)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:118)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:195)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:103)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488) 
~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:133)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:856) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:311) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:247) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.hudi.commit.DatasetBulkInsertCommitActionExecutor.doExecute(DatasetBulkInsertCommitActionExecutor.java:81)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.commit.BaseDatasetBulkInsertCommitActionExecutor.execute(BaseDatasetBulkInsertCommitActionExecutor.java:102)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.bulkInsertAsRow(HoodieSparkSqlWriter.scala:910)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.writeInternal(HoodieSparkSqlWriter.scala:409)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:132) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:150) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:47)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:118)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:195)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:103)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488) 
~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:133)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:856) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:387) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:360) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:247) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_202]
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_202]
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_202]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_202]
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 
~[py4j-0.10.9.7.jar:?]
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374) 
~[py4j-0.10.9.7.jar:?]
        at py4j.Gateway.invoke(Gateway.java:282) ~[py4j-0.10.9.7.jar:?]
        at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 
~[py4j-0.10.9.7.jar:?]
        at py4j.commands.CallCommand.execute(CallCommand.java:79) 
~[py4j-0.10.9.7.jar:?]
        at 
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182) 
~[py4j-0.10.9.7.jar:?]
        at py4j.ClientServerConnection.run(ClientServerConnection.java:106) 
~[py4j-0.10.9.7.jar:?]
        at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_202]
   Caused by: org.apache.hudi.org.apache.http.client.HttpResponseException: 
Server Error
        at 
org.apache.hudi.org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.http.client.fluent.Response.handleResponse(Response.java:90)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.org.apache.http.client.fluent.Response.returnContent(Response.java:97)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:194)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:496)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        ... 98 more
   10:29:32.590 [Thread-6] ERROR 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader - Got exception 
when reading log file
   java.lang.ClassCastException: 
org.apache.hudi.avro.model.HoodieDeleteRecordList cannot be cast to 
org.apache.hudi.avro.model.HoodieDeleteRecordList
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.deserialize(HoodieDeleteBlock.java:160)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.block.HoodieDeleteBlock.getRecordsToDelete(HoodieDeleteBlock.java:115)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.processQueuedBlocksForInstant(AbstractHoodieLogRecordReader.java:828)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:403)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:117)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.<init>(HoodieMergedLogRecordScanner.java:76)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:498)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:429)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:414)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:291)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:255)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:145)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.metadata.HoodieMetadataFileSystemView.getAllPartitionPaths(HoodieMetadataFileSystemView.java:72)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensureAllPartitionsLoadedCorrectly(AbstractTableFileSystemView.java:314)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.AbstractTableFileSystemView.loadAllPartitions(AbstractTableFileSystemView.java:752)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.PriorityBasedFileSystemView.execute(PriorityBasedFileSystemView.java:73)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.common.table.view.PriorityBasedFileSystemView.loadAllPartitions(PriorityBasedFileSystemView.java:172)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanner.<init>(CleanPlanner.java:110) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:105)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:151)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.action.clean.CleanPlanActionExecutor.execute(CleanPlanActionExecutor.java:177)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.table.HoodieSparkCopyOnWriteTable.scheduleCleaning(HoodieSparkCopyOnWriteTable.java:217)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieTableServiceClient.scheduleTableServiceInternal(BaseHoodieTableServiceClient.java:628)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieTableServiceClient.clean(BaseHoodieTableServiceClient.java:751)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:861)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:834)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:865)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.autoCleanOnCommit(BaseHoodieWriteClient.java:599)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.mayBeCleanAndArchive(BaseHoodieWriteClient.java:578)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:248)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:211)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.internal.DataSourceInternalWriterHelper.commit(DataSourceInternalWriterHelper.java:89)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.spark3.internal.HoodieDataSourceInternalBatchWrite.commit(HoodieDataSourceInternalBatchWrite.java:92)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:422)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:382)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.writeWithV2(WriteToDataSourceV2Exec.scala:248)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run(WriteToDataSourceV2Exec.scala:360)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2ExistingTableWriteExec.run$(WriteToDataSourceV2Exec.scala:359)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.AppendDataExec.run(WriteToDataSourceV2Exec.scala:248)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:118)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:195)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:103)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488) 
~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:133)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:856) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:311) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:247) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.hudi.commit.DatasetBulkInsertCommitActionExecutor.doExecute(DatasetBulkInsertCommitActionExecutor.java:81)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.commit.BaseDatasetBulkInsertCommitActionExecutor.execute(BaseDatasetBulkInsertCommitActionExecutor.java:102)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.bulkInsertAsRow(HoodieSparkSqlWriter.scala:910)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.writeInternal(HoodieSparkSqlWriter.scala:409)
 ~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:132) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:150) 
~[hudi-spark3.4-bundle_2.12-0.14.0.jar:0.14.0]
        at 
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:47)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:118)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:195)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:103)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:65)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:31)
 ~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488) 
~[spark-catalyst_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:133)
 ~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:856) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:387) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at 
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:360) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:247) 
~[spark-sql_2.12-3.4.1.jar:3.4.1]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
~[?:1.8.0_202]
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_202]
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_202]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_202]
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 
~[py4j-0.10.9.7.jar:?]
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374) 
~[py4j-0.10.9.7.jar:?]
        at py4j.Gateway.invoke(Gateway.java:282) ~[py4j-0.10.9.7.jar:?]
        at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 
~[py4j-0.10.9.7.jar:?]
        at py4j.commands.CallCommand.execute(CallCommand.java:79) 
~[py4j-0.10.9.7.jar:?]
        at 
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182) 
~[py4j-0.10.9.7.jar:?]
        at py4j.ClientServerConnection.run(ClientServerConnection.java:106) 
~[py4j-0.10.9.7.jar:?]
        at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_202]
   24/03/11 10:29:32 ERROR AppendDataExec: Data source write support 
org.apache.hudi.spark3.internal.HoodieDataSourceInternalBatchWrite@742918c6 is 
aborting.
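   
   For context, the write that hits this code path for us looks roughly like the sketch below (PySpark on Spark 3.4.1 with the hudi-spark3.4-bundle_2.12-0.14.0 jar, bulk_insert on a COPY_ON_WRITE table with the metadata table enabled, matching the frames in the trace). The table name, schema, base path, and option values are placeholders, not the actual job:
   
   # Minimal PySpark sketch; all names and paths below are placeholders.
   from pyspark.sql import SparkSession
   
   spark = (
       SparkSession.builder
       .appName("hudi-cow-bulk-insert")
       # Standard Hudi-on-Spark settings
       .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
       .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
       .getOrCreate()
   )
   
   df = spark.createDataFrame(
       [(1, "a", "2024-03-11"), (2, "b", "2024-03-11")],
       ["id", "name", "dt"],
   )
   
   hudi_options = {
       "hoodie.table.name": "cow_table",                    # placeholder
       "hoodie.datasource.write.table.type": "COPY_ON_WRITE",
       "hoodie.datasource.write.operation": "bulk_insert",  # row-writer bulk insert, as in the trace
       "hoodie.datasource.write.recordkey.field": "id",
       "hoodie.datasource.write.partitionpath.field": "dt",
       "hoodie.datasource.write.precombine.field": "id",
       "hoodie.metadata.enable": "true",                    # metadata table on, as in the trace
   }
   
   (
       df.write.format("hudi")
       .options(**hudi_options)
       .mode("append")
       .save("/path/to/hudi/cow_table")                     # placeholder base path
   )
   
   The error shows up during the post-commit clean, when the timeline server reads the metadata table log files.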
   
   ============================
   Your help is highly appreciated.

