[ 
https://issues.apache.org/jira/browse/HUDI-1802?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Udit Mehrotra resolved HUDI-1802.
---------------------------------
    Resolution: Fixed

Resolved with https://github.com/apache/hudi/pull/2835

> Timeline Server Bundle need to include com.esotericsoftware package
> -------------------------------------------------------------------
>
>                 Key: HUDI-1802
>                 URL: https://issues.apache.org/jira/browse/HUDI-1802
>             Project: Apache Hudi
>          Issue Type: Bug
>          Components: Common Core
>            Reporter: cdmikechen
>            Assignee: cdmikechen
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 0.9.0
>
>
> When using the Timeline Server Bundle as a remote timeline server, it 
> sometimes fails when retrieving files.
> {code}
> 21/04/16 02:32:16 INFO service.FileSystemViewHandler: 
> TimeTakenMillis[Total=1, Refresh=0, handle=1, Check=0], Success=true, 
> Query=basepath=%2Fhive%2Fwarehouse%2Fbigdata.db%2Fetl_datasource&lastinstantts=20210413051307&timelinehash=f3173e19a150f2c50e2a0f3c724351683edbf526bcbde67774f9e34981130b6b,
>  Host=hudi-timeline-server.bigdata.svc.cluster.local:26754, synced=false
> 21/04/16 02:32:17 INFO view.AbstractTableFileSystemView: Building file system 
> view for partition ()
> 21/04/16 02:32:17 INFO view.AbstractTableFileSystemView: #files found in 
> partition () =3, Time taken =8
> 21/04/16 02:32:17 INFO view.RocksDbBasedFileSystemView: Resetting and adding 
> new partition () to ROCKSDB based file-system view at 
> /home/hdfs/software/hudi/hudi-timeline-server/hoodie_timeline_rocksdb, Total 
> file-groups=1
> 21/04/16 02:32:17 INFO collection.RocksDBDAO: Prefix DELETE 
> (query=type=slice,part=,id=) on 
> hudi_view__hive_warehouse_bigdata.db_etl_datasource
> 21/04/16 02:32:17 INFO collection.RocksDBDAO: Prefix DELETE 
> (query=type=df,part=,id=) on 
> hudi_view__hive_warehouse_bigdata.db_etl_datasource
> 21/04/16 02:32:17 INFO service.FileSystemViewHandler: 
> TimeTakenMillis[Total=154, Refresh=5, handle=0, Check=0], Success=true, 
> Query=partition=&maxinstant=20210413051307&basepath=%2Fhive%2Fwarehouse%2Fbigdata.db%2Fetl_datasource&lastinstantts=20210413051307&timelinehash=f3173e19a150f2c50e2a0f3c724351683edbf526bcbde67774f9e34981130b6b,
>  Host=hudi-timeline-server.bigdata.svc.cluster.local:26754, synced=false
> 21/04/16 02:32:17 ERROR javalin.Javalin: Exception occurred while servicing 
> http-request
> java.lang.NoClassDefFoundError: com/esotericsoftware/kryo/Kryo
>       at 
> org.apache.hudi.common.util.SerializationUtils$KryoInstantiator.newKryo(SerializationUtils.java:116)
>       at 
> org.apache.hudi.common.util.SerializationUtils$KryoSerializerInstance.<init>(SerializationUtils.java:89)
>       at 
> java.lang.ThreadLocal$SuppliedThreadLocal.initialValue(ThreadLocal.java:284)
>       at java.lang.ThreadLocal.setInitialValue(ThreadLocal.java:180)
>       at java.lang.ThreadLocal.get(ThreadLocal.java:170)
>       at 
> org.apache.hudi.common.util.SerializationUtils.serialize(SerializationUtils.java:52)
>       at 
> org.apache.hudi.common.util.collection.RocksDBDAO.putInBatch(RocksDBDAO.java:172)
>       at 
> org.apache.hudi.common.table.view.RocksDbBasedFileSystemView.lambda$null$12(RocksDbBasedFileSystemView.java:237)
>       at 
> java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:184)
>       at 
> java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
>       at 
> java.util.TreeMap$EntrySpliterator.forEachRemaining(TreeMap.java:2969)
>       at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
>       at 
> java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
>       at 
> java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:151)
>       at 
> java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:174)
>       at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
>       at 
> java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:418)
>       at 
> org.apache.hudi.common.table.view.RocksDbBasedFileSystemView.lambda$null$13(RocksDbBasedFileSystemView.java:236)
>       at 
> org.apache.hudi.common.util.collection.RocksDBDAO.writeBatch(RocksDBDAO.java:154)
>       at 
> org.apache.hudi.common.table.view.RocksDbBasedFileSystemView.lambda$storePartitionView$14(RocksDbBasedFileSystemView.java:235)
>       at java.util.ArrayList.forEach(ArrayList.java:1257)
>       at 
> org.apache.hudi.common.table.view.RocksDbBasedFileSystemView.storePartitionView(RocksDbBasedFileSystemView.java:234)
>       at 
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.lambda$addFilesToView$2(AbstractTableFileSystemView.java:145)
>       at java.util.HashMap.forEach(HashMap.java:1289)
>       at 
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.addFilesToView(AbstractTableFileSystemView.java:133)
>       at 
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.lambda$ensurePartitionLoadedCorrectly$9(AbstractTableFileSystemView.java:284)
>       at 
> java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
>       at 
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensurePartitionLoadedCorrectly(AbstractTableFileSystemView.java:269)
>       at 
> org.apache.hudi.common.table.view.AbstractTableFileSystemView.getLatestBaseFilesBeforeOrOn(AbstractTableFileSystemView.java:455)
>       at 
> org.apache.hudi.timeline.service.handlers.BaseFileHandler.getLatestDataFilesBeforeOrOn(BaseFileHandler.java:57)
>       at 
> org.apache.hudi.timeline.service.FileSystemViewHandler.lambda$registerDataFilesAPI$5(FileSystemViewHandler.java:185)
>       at 
> org.apache.hudi.timeline.service.FileSystemViewHandler$ViewHandler.handle(FileSystemViewHandler.java:359)
>       at 
> io.javalin.security.SecurityUtil.noopAccessManager(SecurityUtil.kt:22)
>       at io.javalin.Javalin.lambda$addHandler$0(Javalin.java:606)
>       at 
> io.javalin.core.JavalinServlet$service$2$1.invoke(JavalinServlet.kt:46)
>       at 
> io.javalin.core.JavalinServlet$service$2$1.invoke(JavalinServlet.kt:17)
>       at 
> io.javalin.core.JavalinServlet$service$1.invoke(JavalinServlet.kt:143)
>       at io.javalin.core.JavalinServlet$service$2.invoke(JavalinServlet.kt:41)
>       at io.javalin.core.JavalinServlet.service(JavalinServlet.kt:107)
>       at 
> io.javalin.core.util.JettyServerUtil$initialize$httpHandler$1.doHandle(JettyServerUtil.kt:72)
>       at 
> org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)
>       at 
> org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)
>       at 
> org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1668)
>       at 
> org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)
>       at 
> org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)
>       at 
> org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)
>       at 
> org.eclipse.jetty.server.handler.HandlerList.handle(HandlerList.java:61)
>       at 
> org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:174)
>       at 
> org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)
>       at org.eclipse.jetty.server.Server.handle(Server.java:502)
>       at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:370)
>       at 
> org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)
>       at 
> org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)
>       at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)
>       at org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)
>       at 
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)
>       at 
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)
>       at 
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
>       at 
> org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)
>       at 
> org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)
>       at 
> org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:765)
>       at 
> org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:683)
>       at java.lang.Thread.run(Thread.java:748)
> Caused by: java.lang.ClassNotFoundException: com.esotericsoftware.kryo.Kryo
>       at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
>       at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>       at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
>       at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>       ... 63 more
> 21/04/16 02:32:17 INFO view.AbstractTableFileSystemView: Building file system 
> view for partition ()
> 21/04/16 02:32:17 INFO view.AbstractTableFileSystemView: #files found in 
> partition () =3, Time taken =2
> 21/04/16 02:32:17 INFO view.RocksDbBasedFileSystemView: Resetting and adding 
> new partition () to ROCKSDB based file-system view at 
> /home/hdfs/software/hudi/hudi-timeline-server/hoodie_timeline_rocksdb, Total 
> file-groups=1
> 21/04/16 02:32:17 INFO collection.RocksDBDAO: Prefix DELETE 
> (query=type=slice,part=,id=) on 
> hudi_view__hive_warehouse_bigdata.db_etl_datasource
> 21/04/16 02:32:17 INFO collection.RocksDBDAO: Prefix DELETE 
> (query=type=df,part=,id=) on 
> hudi_view__hive_warehouse_bigdata.db_etl_datasource
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to