[
https://issues.apache.org/jira/browse/FLINK-32970?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Spongebob updated FLINK-32970:
------------------------------
Description:
Firstly, the "connectors.jar" contains "test-connector" and I put it in user
libs.
Then, I started a tableEvironment in one operator function of
streamExecutionEnvironment.
In the tableEvironment I declared a table using the "test-connector".
Finally, I run the application and load the "connectors.jar" using "-C
connectors.jar", when the table's creation statement was executed, I got an
class not found exception which like below(please notice that if I put the
"connectors.jar" in flink lib, the application would run normally):
{code:java}
SLF4J: Found binding in [jar:file:/data/hadoop-3.3.5/tmpdata/nm-local-dir/usercache/root/appcache/application_1690443774859_0439/filecache/13/log4j-slf4j-impl-2.17.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/data/hadoop-3.3.5/share/hadoop/common/lib/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
java.util.concurrent.ExecutionException: org.apache.flink.table.api.TableException: Failed to wait job finish at
java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357) at
java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908) at
org.apache.flink.table.api.internal.TableResultImpl.awaitInternal(TableResultImpl.java:129)
at
org.apache.flink.table.api.internal.TableResultImpl.await(TableResultImpl.java:92)
at
com.xctech.cone.data.sql.model.runner.ModelRunner.executeStatementSet(ModelRunner.java:58)
at
com.xctech.cone.data.versionedStarRocks.MicroBatchModelRunner.run(MicroBatchModelRunner.java:60)
at
com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction.invoke(ExecuteSQLFunction.java:103)
at
com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction.invoke(ExecuteSQLFunction.java:25)
at
org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:54)
at
org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:82)
at
org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:57)
at
org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:29)
at
org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:56)
at
org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:29)
at
org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
at
org.apache.flink.streaming.api.functions.windowing.PassThroughAllWindowFunction.apply(PassThroughAllWindowFunction.java:35)
at
org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueAllWindowFunction.process(InternalSingleValueAllWindowFunction.java:48)
at
org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueAllWindowFunction.process(InternalSingleValueAllWindowFunction.java:34)
at
org.apache.flink.streaming.runtime.operators.windowing.WindowOperator.emitWindowContents(WindowOperator.java:568)
at
org.apache.flink.streaming.runtime.operators.windowing.WindowOperator.onProcessingTime(WindowOperator.java:524)
at
org.apache.flink.streaming.api.operators.InternalTimerServiceImpl.onProcessingTime(InternalTimerServiceImpl.java:284)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.invokeProcessingTimeCallback(StreamTask.java:1693)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$null$22(StreamTask.java:1684)
at
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761)
at
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) at
org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) at
java.lang.Thread.run(Thread.java:750)
Caused by: org.apache.flink.table.api.TableException: Failed to wait job finish at
org.apache.flink.table.api.internal.InsertResultIterator.hasNext(InsertResultIterator.java:56)
at
org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.hasNext(TableResultImpl.java:370)
at
org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.isFirstRowReady(TableResultImpl.java:383)
at
org.apache.flink.table.api.internal.TableResultImpl.lambda$awaitInternal$1(TableResultImpl.java:116)
at
java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1640)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
... 1 more
Caused by: java.util.concurrent.ExecutionException:
org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
at
java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357) at
java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908) at
org.apache.flink.table.api.internal.InsertResultIterator.hasNext(InsertResultIterator.java:54)
... 7 more
Caused by:
org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
at
org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
at
org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
at
java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616) at
java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
at
java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at
java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at
org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:258)
at
java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
at
java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
at
java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at
java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at
org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1389)
at
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93)
at
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
at
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92)
at
java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
at
java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
at
java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
at
java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975) at
org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47)
at akka.dispatch.OnComplete.internal(Future.scala:300) at
akka.dispatch.OnComplete.internal(Future.scala:297) at
akka.dispatch.japi$CallbackBridge.apply(Future.scala:224) at
akka.dispatch.japi$CallbackBridge.apply(Future.scala:221) at
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at
org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65)
at
scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68) at
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
at
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
at
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284) at
akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621) at
akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24)
at
akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23)
at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532) at
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29) at
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29) at
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at
akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
at
akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81)
at
akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49) at
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1067)
at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1703) at
java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:172)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy at
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
at
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
at
org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
at
org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
at
org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
at
org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
at
org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
at
org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498) at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
at
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
at
org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
at
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24) at
akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20) at
scala.PartialFunction.applyOrElse(PartialFunction.scala:123) at
scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) at
akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20) at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at
akka.actor.Actor.aroundReceive(Actor.scala:537) at
akka.actor.Actor.aroundReceive$(Actor.scala:535) at
akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220) at
akka.actor.ActorCell.receiveMessage(ActorCell.scala:580) at
akka.actor.ActorCell.invoke(ActorCell.scala:548) at
akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270) at
akka.dispatch.Mailbox.run(Mailbox.scala:231) at
akka.dispatch.Mailbox.exec(Mailbox.scala:243) ... 4 more
Caused by: org.apache.flink.streaming.runtime.tasks.StreamTaskException: Cannot load user class: com.xctech.cone.connector.starrocks.table.StarRocksBatchSinkFunction
ClassLoader info: URL ClassLoader:
Class not resolvable through given classloader. at
org.apache.flink.streaming.api.graph.StreamConfig.getStreamOperatorFactory(StreamConfig.java:338)
at
org.apache.flink.streaming.runtime.tasks.OperatorChain.<init>(OperatorChain.java:155)
at
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.<init>(RegularOperatorChain.java:63)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.restoreInternal(StreamTask.java:666)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.restore(StreamTask.java:654)
at
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:927)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) at
org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) at
java.lang.Thread.run(Thread.java:750)
Caused by: java.lang.ClassNotFoundException: com.xctech.cone.connector.starrocks.table.StarRocksBatchSinkFunction at
java.net.URLClassLoader.findClass(URLClassLoader.java:387) at
java.lang.ClassLoader.loadClass(ClassLoader.java:418) at
org.apache.flink.util.FlinkUserCodeClassLoader.loadClassWithoutExceptionHandling(FlinkUserCodeClassLoader.java:64)
at
org.apache.flink.util.ChildFirstClassLoader.loadClassWithoutExceptionHandling(ChildFirstClassLoader.java:74)
at
org.apache.flink.util.FlinkUserCodeClassLoader.loadClass(FlinkUserCodeClassLoader.java:48)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351) at
org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.loadClass(FlinkUserCodeClassLoaders.java:172)
at java.lang.Class.forName0(Native Method) at
java.lang.Class.forName(Class.java:348) at
org.apache.flink.util.InstantiationUtil$ClassLoaderObjectInputStream.resolveClass(InstantiationUtil.java:78)
at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:2011)
at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1875) at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2209) at
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2454) at
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2378) at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2236) at
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2454) at
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2378) at
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2236) at
java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
java.io.ObjectInputStream.readObject(ObjectInputStream.java:508) at
java.io.ObjectInputStream.readObject(ObjectInputStream.java:466) at
org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:617)
at
org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:602)
at
org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:589)
at
org.apache.flink.util.InstantiationUtil.readObjectFromConfig(InstantiationUtil.java:543)
at
org.apache.flink.streaming.api.graph.StreamConfig.getStreamOperatorFactory(StreamConfig.java:324)
... 9 more

taskmanager.log 2853
2023-08-25 16:39:34,248 INFO  com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction [] - Customized StarRocks versioned micro-batch sync started
2023-08-25 16:41:00,463 INFO  com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction [] - Triggered SR micro-batch, current global version: 2203
2023-08-25 16:41:03,799 ERROR com.xctech.cone.data.sql.model.runner.ModelRunner [] - Execution of one of the SQL IDs in the list failed: [1]
2023-08-25 16:41:05,195 ERROR
org.apache.flink.runtime.util.ClusterUncaughtExceptionHandler [] - WARNING:
Thread 'Thread-8' produced an uncaught exception. If you want to fail on
uncaught exceptions, then configure cluster.uncaught-exception-handling
accordingly
java.lang.IllegalStateException: Trying to access closed
classloader. Please check if you store classloaders directly or indirectly in
static fields. If the stacktrace suggests that the leak occurs in a third party
library and cannot be fixed immediately, you can disable this check with the
configuration 'classloader.check-leaked-classloader'. at
org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:164)
~[flink-dist_2.12-1.14.5.jar:1.14.5] at
org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:183)
~[flink-dist_2.12-1.14.5.jar:1.14.5] at
org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2839)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3113)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3072)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:3045)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadProps(Configuration.java:2923)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2905)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.get(Configuration.java:1247)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1864)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1841)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102)
~[hadoop-common-3.3.5.jar:?]

taskmanager.out 2659
2023-08-25 16:39:34,248 INFO Customized StarRocks versioned micro-batch sync started {A_SINK_1={1=SinkData(sqlId=1, pkBuffer=[[BOB]], sinkTable=A_SINK_1)}}
2023-08-25 16:41:00,463 INFO Triggered SR micro-batch, current global version: 2203
2023-08-25 16:41:03,799 ERROR Execution of one of the SQL IDs in the list failed: [1]
2023-08-25 16:41:05,195 ERROR WARNING: Thread 'Thread-8' produced an uncaught exception.
If you want to fail on uncaught exceptions, then configure
cluster.uncaught-exception-handling accordingly
java.lang.IllegalStateException:
Trying to access closed classloader. Please check if you store classloaders
directly or indirectly in static fields. If the stacktrace suggests that the
leak occurs in a third party library and cannot be fixed immediately, you can
disable this check with the configuration
'classloader.check-leaked-classloader'. at
org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:164)
~[flink-dist_2.12-1.14.5.jar:1.14.5] at
org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:183)
~[flink-dist_2.12-1.14.5.jar:1.14.5] at
org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2839)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3113)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3072)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:3045)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.loadProps(Configuration.java:2923)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2905)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.get(Configuration.java:1247)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1864)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1841)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65)
~[hadoop-common-3.3.5.jar:?] at
org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102)
~[hadoop-common-3.3.5.jar:?]
{code}
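For context, a minimal sketch of the setup described above. The job, class, and table names here are hypothetical placeholders; only the "-C connectors.jar" submission option and the "test-connector" identifier come from this report.
{code:java}
// Submitted roughly as (paths are illustrative):
//   flink run -C file:///opt/userlibs/connectors.jar -c com.example.TableEnvInsideOperatorJob job.jar

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class TableEnvInsideOperatorJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Any trigger stream; the interesting part happens inside the sink below.
        env.fromElements("trigger").addSink(new ExecuteSqlSink());
        env.execute("table-environment-inside-operator");
    }

    /** Sink that starts a Table API job from inside a DataStream operator. */
    public static class ExecuteSqlSink extends RichSinkFunction<String> {

        @Override
        public void invoke(String value, Context context) throws Exception {
            TableEnvironment tableEnv =
                    TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

            // The table is backed by the connector packaged in connectors.jar.
            tableEnv.executeSql(
                    "CREATE TABLE sink_table (id INT, name STRING) "
                            + "WITH ('connector' = 'test-connector')");

            // Executing the statement is where the ClassNotFoundException shows up
            // when connectors.jar is shipped with "-C" instead of being in flink/lib.
            tableEnv.executeSql("INSERT INTO sink_table VALUES (1, 'BOB')").await();
        }
    }
}
{code}
In the trace above, the failure happens while the inner Table job's sink operator is being deserialized (StreamConfig.getStreamOperatorFactory), i.e. the connector class shipped via "-C" is not visible to the classloader used at that point.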
was:
Firstly, the "connectors.jar" contains "test-connector" and I put it in user
libs.
Then, I started a tableEvironment in one operator function of
streamExecutionEnvironment.
In the tableEvironment I declared a table using the "test-connector".
Finally, when the table's creation statement was executed, I got an class not
found exception which like below(please notice that if I put the
"connectors.jar" in flink lib, the application would run normally):
> could not load external class using "-C" option
> -----------------------------------------------
>
> Key: FLINK-32970
> URL: https://issues.apache.org/jira/browse/FLINK-32970
> Project: Flink
> Issue Type: Bug
> Components: API / Core
> Affects Versions: 1.14.6
> Reporter: Spongebob
> Priority: Major
>
> Firstly, the "connectors.jar" contains "test-connector" and I put it in user
> libs.
> Then, I started a tableEvironment in one operator function of
> streamExecutionEnvironment.
> In the tableEvironment I declared a table using the "test-connector".
> Finally, I run the application and load the "connectors.jar" using "-C
> connectors.jar", when the table's creation statement was executed, I got an
> class not found exception which like below(please notice that if I put the
> "connectors.jar" in flink lib, the application would run normally):
> {code:java}
> SLF4J: Found binding in
> [jar:file:/data/hadoop-3.3.5/tmpdata/nm-local-dir/usercache/root/appcache/application_1690443774859_0439/filecache/13/log4j-slf4j-impl-2.17.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]SLF4J:
> Found binding in
> [jar:file:/data/hadoop-3.3.5/share/hadoop/common/lib/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class]SLF4J:
> See http://www.slf4j.org/codes.html#multiple_bindings for an
> explanation.SLF4J: Actual binding is of type
> [org.apache.logging.slf4j.Log4jLoggerFactory]java.util.concurrent.ExecutionException:
> org.apache.flink.table.api.TableException: Failed to wait job finish
> at
> java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
> at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908)
> at
> org.apache.flink.table.api.internal.TableResultImpl.awaitInternal(TableResultImpl.java:129)
> at
> org.apache.flink.table.api.internal.TableResultImpl.await(TableResultImpl.java:92)
> at
> com.xctech.cone.data.sql.model.runner.ModelRunner.executeStatementSet(ModelRunner.java:58)
> at
> com.xctech.cone.data.versionedStarRocks.MicroBatchModelRunner.run(MicroBatchModelRunner.java:60)
> at
> com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction.invoke(ExecuteSQLFunction.java:103)
> at
> com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction.invoke(ExecuteSQLFunction.java:25)
> at
> org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:54)
> at
> org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:82)
> at
> org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:57)
> at
> org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:29)
> at
> org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:56)
> at
> org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:29)
> at
> org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
> at
> org.apache.flink.streaming.api.functions.windowing.PassThroughAllWindowFunction.apply(PassThroughAllWindowFunction.java:35)
> at
> org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueAllWindowFunction.process(InternalSingleValueAllWindowFunction.java:48)
> at
> org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueAllWindowFunction.process(InternalSingleValueAllWindowFunction.java:34)
> at
> org.apache.flink.streaming.runtime.operators.windowing.WindowOperator.emitWindowContents(WindowOperator.java:568)
> at
> org.apache.flink.streaming.runtime.operators.windowing.WindowOperator.onProcessingTime(WindowOperator.java:524)
> at
> org.apache.flink.streaming.api.operators.InternalTimerServiceImpl.onProcessingTime(InternalTimerServiceImpl.java:284)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.invokeProcessingTimeCallback(StreamTask.java:1693)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$null$22(StreamTask.java:1684)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
> at
> org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90)
> at
> org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
> at
> org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
> at
> org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761)
> at
> org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
> at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937)
> at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) at
> org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) at
> java.lang.Thread.run(Thread.java:750)Caused by:
> org.apache.flink.table.api.TableException: Failed to wait job finish at
> org.apache.flink.table.api.internal.InsertResultIterator.hasNext(InsertResultIterator.java:56)
> at
> org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.hasNext(TableResultImpl.java:370)
> at
> org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.isFirstRowReady(TableResultImpl.java:383)
> at
> org.apache.flink.table.api.internal.TableResultImpl.lambda$awaitInternal$1(TableResultImpl.java:116)
> at
> java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1640)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> ... 1 moreCaused by: java.util.concurrent.ExecutionException:
> org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
> at
> java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
> at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908)
> at
> org.apache.flink.table.api.internal.InsertResultIterator.hasNext(InsertResultIterator.java:54)
> ... 7 moreCaused by:
> org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
> at
> org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144)
> at
> org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$3(MiniClusterJobClient.java:137)
> at
> java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616)
> at
> java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:591)
> at
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
> at
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
> at
> org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$1(AkkaInvocationHandler.java:258)
> at
> java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
> at
> java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
> at
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
> at
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
> at
> org.apache.flink.util.concurrent.FutureUtils.doForward(FutureUtils.java:1389)
> at
> org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$null$1(ClassLoadingUtils.java:93)
> at
> org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
> at
> org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.lambda$guardCompletionWithContextClassLoader$2(ClassLoadingUtils.java:92)
> at
> java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:774)
> at
> java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:750)
> at
> java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:488)
> at
> java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1975)
> at
> org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$1.onComplete(AkkaFutureUtils.java:47)
> at akka.dispatch.OnComplete.internal(Future.scala:300) at
> akka.dispatch.OnComplete.internal(Future.scala:297) at
> akka.dispatch.japi$CallbackBridge.apply(Future.scala:224) at
> akka.dispatch.japi$CallbackBridge.apply(Future.scala:221) at
> scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at
> org.apache.flink.runtime.concurrent.akka.AkkaFutureUtils$DirectExecutionContext.execute(AkkaFutureUtils.java:65)
> at
> scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:68)
> at
> scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:284)
> at
> scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:284)
> at
> scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:284)
> at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:621) at
> akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:24)
> at
> akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:23)
> at scala.concurrent.Future.$anonfun$andThen$1(Future.scala:532) at
> scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:29) at
> scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:29) at
> scala.concurrent.impl.CallbackRunnable.run(Promise.scala:60) at
> akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:63)
> at
> akka.dispatch.BatchingExecutor$BlockableBatch.$anonfun$run$1(BatchingExecutor.scala:100)
> at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
> at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:81)
> at
> akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:100)
> at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:49)
> at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:48)
> at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
> at
> java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1067)
> at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1703) at
> java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:172)Caused
> by: org.apache.flink.runtime.JobException: Recovery is suppressed by
> NoRestartBackoffTimeStrategy at
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
> at
> org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
> at
> org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
> at
> org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
> at
> org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
> at
> org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
> at
> org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
> at
> org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498) at
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
> at
> org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
> at
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
> at
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
> at
> org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
> at
> org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
> at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24) at
> akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20) at
> scala.PartialFunction.applyOrElse(PartialFunction.scala:123) at
> scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) at
> akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20) at
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) at
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at
> scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) at
> akka.actor.Actor.aroundReceive(Actor.scala:537) at
> akka.actor.Actor.aroundReceive$(Actor.scala:535) at
> akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220) at
> akka.actor.ActorCell.receiveMessage(ActorCell.scala:580) at
> akka.actor.ActorCell.invoke(ActorCell.scala:548) at
> akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270) at
> akka.dispatch.Mailbox.run(Mailbox.scala:231) at
> akka.dispatch.Mailbox.exec(Mailbox.scala:243) ... 4 more
> Caused by: org.apache.flink.streaming.runtime.tasks.StreamTaskException:
> Cannot load user class:
> com.xctech.cone.connector.starrocks.table.StarRocksBatchSinkFunction
> ClassLoader info: URL ClassLoader:
> Class not resolvable through given classloader.
> at
> org.apache.flink.streaming.api.graph.StreamConfig.getStreamOperatorFactory(StreamConfig.java:338)
> at
> org.apache.flink.streaming.runtime.tasks.OperatorChain.<init>(OperatorChain.java:155)
> at
> org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.<init>(RegularOperatorChain.java:63)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.restoreInternal(StreamTask.java:666)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.restore(StreamTask.java:654)
> at
> org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
> at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:927)
> at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) at
> org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) at
> java.lang.Thread.run(Thread.java:750)
> Caused by: java.lang.ClassNotFoundException:
> com.xctech.cone.connector.starrocks.table.StarRocksBatchSinkFunction at
> java.net.URLClassLoader.findClass(URLClassLoader.java:387) at
> java.lang.ClassLoader.loadClass(ClassLoader.java:418) at
> org.apache.flink.util.FlinkUserCodeClassLoader.loadClassWithoutExceptionHandling(FlinkUserCodeClassLoader.java:64)
> at
> org.apache.flink.util.ChildFirstClassLoader.loadClassWithoutExceptionHandling(ChildFirstClassLoader.java:74)
> at
> org.apache.flink.util.FlinkUserCodeClassLoader.loadClass(FlinkUserCodeClassLoader.java:48)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:351) at
> org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.loadClass(FlinkUserCodeClassLoaders.java:172)
> at java.lang.Class.forName0(Native Method) at
> java.lang.Class.forName(Class.java:348) at
> org.apache.flink.util.InstantiationUtil$ClassLoaderObjectInputStream.resolveClass(InstantiationUtil.java:78)
> at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:2011)
> at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1875) at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2209)
> at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2454)
> at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2378)
> at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2236)
> at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2454)
> at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2378)
> at
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2236)
> at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1692) at
> java.io.ObjectInputStream.readObject(ObjectInputStream.java:508) at
> java.io.ObjectInputStream.readObject(ObjectInputStream.java:466) at
> org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:617)
> at
> org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:602)
> at
> org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:589)
> at
> org.apache.flink.util.InstantiationUtil.readObjectFromConfig(InstantiationUtil.java:543)
> at
> org.apache.flink.streaming.api.graph.StreamConfig.getStreamOperatorFactory(StreamConfig.java:324)
> ... 9 more
> taskmanager.log 2853
> 2023-08-25 16:39:34,248 INFO
> com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction [] -
> Customized starRocks versioned micro-batch sync has started
> 2023-08-25 16:41:00,463 INFO
> com.xctech.cone.data.versionedStarRocks.ExecuteSQLFunction [] - SR
> micro-batch triggered, current global version number: 2203
> 2023-08-25 16:41:03,799 ERROR
> com.xctech.cone.data.sql.model.runner.ModelRunner [] - One of the SQL IDs
> in the list failed to execute: [1]
> 2023-08-25 16:41:05,195 ERROR
> org.apache.flink.runtime.util.ClusterUncaughtExceptionHandler [] - WARNING:
> Thread 'Thread-8' produced an uncaught exception. If you want to fail on
> uncaught exceptions, then configure cluster.uncaught-exception-handling
> accordingly
> java.lang.IllegalStateException: Trying to access closed
> classloader. Please check if you store classloaders directly or indirectly in
> static fields. If the stacktrace suggests that the leak occurs in a third
> party library and cannot be fixed immediately, you can disable this check
> with the configuration 'classloader.check-leaked-classloader'. at
> org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:164)
> ~[flink-dist_2.12-1.14.5.jar:1.14.5] at
> org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:183)
> ~[flink-dist_2.12-1.14.5.jar:1.14.5] at
> org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2839)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3113)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3072)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:3045)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadProps(Configuration.java:2923)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2905)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.get(Configuration.java:1247)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1864)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1841)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102)
> ~[hadoop-common-3.3.5.jar:?]
> taskmanager.out 2659
> 2023-08-25 16:39:34,248 INFO Customized starRocks versioned micro-batch sync
> has started {A_SINK_1={1=SinkData(sqlId=1, pkBuffer=[[BOB]],
> sinkTable=A_SINK_1)}}
> 2023-08-25 16:41:00,463 INFO SR micro-batch triggered, current global
> version number: 2203
> 2023-08-25 16:41:03,799 ERROR One of the SQL IDs in the list failed to
> execute: [1]
> 2023-08-25 16:41:05,195 ERROR WARNING: Thread 'Thread-8' produced an
> uncaught exception. If you want to fail on uncaught exceptions, then
> configure cluster.uncaught-exception-handling
> accordingly
> java.lang.IllegalStateException: Trying to access closed
> classloader. Please check if you store classloaders directly or indirectly in
> static fields. If the stacktrace suggests that the leak occurs in a third
> party library and cannot be fixed immediately, you can disable this check
> with the configuration 'classloader.check-leaked-classloader'. at
> org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:164)
> ~[flink-dist_2.12-1.14.5.jar:1.14.5] at
> org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:183)
> ~[flink-dist_2.12-1.14.5.jar:1.14.5] at
> org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2839)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3113)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:3072)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:3045)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.loadProps(Configuration.java:2923)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2905)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.get(Configuration.java:1247)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1864)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1841)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65)
> ~[hadoop-common-3.3.5.jar:?] at
> org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102)
> ~[hadoop-common-3.3.5.jar:?]
> {code}
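For reference only: the two configuration keys quoted verbatim in the task manager log above, 'classloader.check-leaked-classloader' and 'cluster.uncaught-exception-handling', can be set on a Flink Configuration object. The snippet below is a minimal sketch (the class name is hypothetical, and string keys are used instead of version-specific option constants); it only illustrates the keys mentioned in the log and is not a proposed fix for the ClassNotFoundException itself.
{code:java}
import org.apache.flink.configuration.Configuration;

public class LoggedConfigKeysSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Disables the "Trying to access closed classloader" safety check
        // referenced in the taskmanager log (illustration only).
        conf.setString("classloader.check-leaked-classloader", "false");
        // Governs whether uncaught exceptions fail the cluster, as the
        // ClusterUncaughtExceptionHandler warning above suggests.
        conf.setString("cluster.uncaught-exception-handling", "FAIL");
        System.out.println(conf);
    }
}
{code}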