[ https://issues.apache.org/jira/browse/FLINK-15635?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17645804#comment-17645804 ]

Prabhu Joseph commented on FLINK-15635:
---------------------------------------

We are seeing the ClassNotFoundException below in the SQL Client after upgrading Hudi's
Flink version to 1.16. This was working fine with Flink 1.15.

The hudi-flink-bundle jar is added through the ADD JAR command. Any idea how to
fix this issue?

 
{code:java}
Caused by: java.lang.ClassNotFoundException: org.apache.hudi.common.model.HoodieRecord
    at java.net.URLClassLoader.findClass(URLClassLoader.java:387) ~[?:1.8.0_352]
    at java.lang.ClassLoader.loadClass(ClassLoader.java:418) ~[?:1.8.0_352]
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352) ~[?:1.8.0_352]
    at java.lang.ClassLoader.loadClass(ClassLoader.java:351) ~[?:1.8.0_352]
    at java.lang.Class.forName0(Native Method) ~[?:1.8.0_352]
    at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_352]
    at org.apache.flink.api.java.typeutils.TypeExtractionUtils.checkAndExtractLambda(TypeExtractionUtils.java:143) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.api.java.typeutils.TypeExtractor.getUnaryOperatorReturnType(TypeExtractor.java:539) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.api.java.typeutils.TypeExtractor.getKeySelectorTypes(TypeExtractor.java:415) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.api.java.typeutils.TypeExtractor.getKeySelectorTypes(TypeExtractor.java:406) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.streaming.api.datastream.KeyedStream.<init>(KeyedStream.java:116) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.flink.streaming.api.datastream.DataStream.keyBy(DataStream.java:300) ~[flink-dist-1.16.0.jar:1.16.0]
    at org.apache.hudi.sink.utils.Pipelines.hoodieStreamWrite(Pipelines.java:339) ~[?:?]
    at org.apache.hudi.table.HoodieTableSink.lambda$getSinkRuntimeProvider$0(HoodieTableSink.java:104) ~[?:?]
    at org.apache.hudi.adapter.DataStreamSinkProviderAdapter.consumeDataStream(DataStreamSinkProviderAdapter.java:35) ~[?:?]
    at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecSink.applySinkProvider(CommonExecSink.java:483) ~[?:?]
    at org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecSink.createSinkTransformation(CommonExecSink.java:203) ~[?:?]
    at org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink.translateToPlanInternal(StreamExecSink.java:176) ~[?:?]
    at org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase.translateToPlan(ExecNodeBase.java:158) ~[?:?]
    at org.apache.flink.table.planner.delegation.StreamPlanner.$anonfun$translateToPlan$1(StreamPlanner.scala:85) ~[?:?]
    at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:233) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.Iterator.foreach(Iterator.scala:937) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.Iterator.foreach$(Iterator.scala:937) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1425) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.IterableLike.foreach(IterableLike.scala:70) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.IterableLike.foreach$(IterableLike.scala:69) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.TraversableLike.map(TraversableLike.scala:233) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.TraversableLike.map$(TraversableLike.scala:226) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[flink-scala_2.12-1.16.0.jar:1.16.0]
    at org.apache.flink.table.planner.delegation.StreamPlanner.translateToPlan(StreamPlanner.scala:84) ~[?:?]
    at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:197) ~[?:?]
    at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1733) ~[flink-table-api-java-uber-1.16.0.jar:1.16.0]
    at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:825) ~[flink-table-api-java-uber-1.16.0.jar:1.16.0]
    at org.apache.flink.table.client.gateway.local.LocalExecutor.executeModifyOperations(LocalExecutor.java:219) ~[flink-sql-client-1.16.0.jar:1.16.0]
{code}
 

 

> Allow passing a ClassLoader to EnvironmentSettings
> --------------------------------------------------
>
>                 Key: FLINK-15635
>                 URL: https://issues.apache.org/jira/browse/FLINK-15635
>             Project: Flink
>          Issue Type: Sub-task
>          Components: Table SQL / API
>            Reporter: Timo Walther
>            Assignee: Francesco Guardiani
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 1.16.0
>
>
> We had a couple of class loading issues in the past because people forgot to 
> use the right classloader in {{flink-table}}. The SQL Client executor code 
> hacks a classloader into the planner process by using {{wrapClassLoader}}, 
> which sets the thread's context classloader.
> Instead, we should allow passing a class loader to environment settings. This 
> class loader can be passed to the planner and stored in the table 
> environment, table config, etc. to ensure consistent class loading behavior.
> Having this in place should remove the need for 
> {{Thread.currentThread().getContextClassLoader()}} in the entire 
> {{flink-table}} module.
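
For illustration, a minimal sketch of what the proposed settings-level classloader could look like from user code, assuming the {{withClassLoader}} builder method that shipped with the 1.16 fix; the jar path is a placeholder:

{code:java}
import java.net.URL;
import java.net.URLClassLoader;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class UserClassLoaderExample {

    public static void main(String[] args) throws Exception {
        // Build a classloader that can see the user bundle jar (placeholder path).
        ClassLoader userClassLoader = new URLClassLoader(
                new URL[] {new URL("file:///path/to/user-bundle.jar")},
                UserClassLoaderExample.class.getClassLoader());

        // Hand the classloader to EnvironmentSettings so the planner, the table
        // environment, and the table config all resolve user classes consistently,
        // instead of relying on Thread.currentThread().getContextClassLoader().
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .withClassLoader(userClassLoader)
                .build();

        TableEnvironment tableEnv = TableEnvironment.create(settings);
        // Statements executed through tableEnv now use userClassLoader when
        // resolving classes from the user jar.
    }
}
{code}

In the SQL Client scenario from the comment above, the client itself would be expected to wire the jars registered via ADD JAR into such a classloader before building the settings.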



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
