hello
Start command: ./yarn-session.sh -n 8 -jm 1024 -tm 1024 -s 4 -nm FlinkOnYarnSession -d
Environment: Flink 1.10.0, CDH 5.14.
Starting Flink on YARN in yarn-session mode with the command above fails with the following error:
org.apache.flink.client.deployment.ClusterDeploymentException: Couldn't deploy Yarn session cluster
    at org.apache.flink.yarn.YarnClusterDescriptor.deploySessionCluster(YarnClusterDescriptor.java:380)
    at org.apache.flink.yarn.cli.FlinkYarnSessionCli.run(FlinkYarnSessionCli.java:548)
    at org.apache.flink.yarn.cli.FlinkYarnSessionCli.lambda$main$5(FlinkYarnSessionCli.java:785)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
    at org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
    at org.apache.flink.yarn.cli.FlinkYarnSessionCli.main(FlinkYarnSessionCli.java:785)
Caused by: java.net.ConnectException: Call From master/192.168.1.20 to slave1:8020 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:791)
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:731)
    at org.apache.hadoop.ipc.Client.call(Client.java:1474)
    at org.apache.hadoop.ipc.Client.call(Client.java:1401)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
    at com.sun.proxy.$Proxy12.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:752)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    at com.sun.proxy.$Proxy13.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1977)
    at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1118)
    at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:1114)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1114)
    at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1400)
    at org.apache.hadoop.fs.FileUtil.checkDest(FileUtil.java:496)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:348)
    at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:338)
    at org.apache.hadoop.fs.FileSystem.copyFromLocalFile(FileSystem.java:1907)
    at org.apache.flink.yarn.Utils.uploadLocalFileToRemote(Utils.java:172)
    at org.apache.flink.yarn.Utils.setupLocalResource(Utils.java:126)
    at org.apache.flink.yarn.YarnClusterDescriptor.setupSingleLocalResource(YarnClusterDescriptor.java:1062)
    at org.apache.flink.yarn.YarnClusterDescriptor.uploadAndRegisterFiles(YarnClusterDescriptor.java:1144)
    at org.apache.flink.yarn.YarnClusterDescriptor.startAppMaster(YarnClusterDescriptor.java:707)
    at org.apache.flink.yarn.YarnClusterDescriptor.deployInternal(YarnClusterDescriptor.java:488)
    at org.apache.flink.yarn.YarnClusterDescriptor.deploySessionCluster(YarnClusterDescriptor.java:373)
    ... 7 more
Caused by: java.net.ConnectException: Connection refused
    at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
    at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
    at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
    at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
    at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
    at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:609)
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:707)
    at org.apache.hadoop.ipc.Client$Connection.access$2800(Client.java:370)
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1523)
    at org.apache.hadoop.ipc.Client.call(Client.java:1440)
    ... 35 more
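
The two "Caused by" entries show the client failing while it uploads the Flink jars to HDFS: the call from master/192.168.1.20 to slave1:8020 is refused, which usually means the NameNode address resolved from HADOOP_CONF_DIR is wrong (for example it points at a standby or at a host where the RPC port is not open) or the NameNode is not running. A minimal sketch to check this from the client machine, assuming the CDH client configs are on the classpath (the HdfsConnectivityCheck class is made up here; host and port are taken from the trace):

import java.net.InetSocketAddress;
import java.net.Socket;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsConnectivityCheck {
    public static void main(String[] args) throws Exception {
        // Picks up core-site.xml / hdfs-site.xml from the classpath (HADOOP_CONF_DIR).
        Configuration conf = new Configuration();
        System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
        // Raw TCP check against the NameNode RPC endpoint seen in the stack trace.
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress("slave1", 8020), 5000);
            System.out.println("slave1:8020 is reachable");
        }
        // Uses the same HDFS client that Flink uses when it stages its jars on HDFS.
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println("HDFS root exists: " + fs.exists(new Path("/")));
        }
    }
}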
------------------ Original Message ------------------
From: "[email protected]" <[email protected]>
Sent: Friday, May 29, 2020, 2:49 PM
To: "user-zh" <[email protected]>
Subject: Reply: Re: flink1.10 on yarn issue
Regarding the error ">>> Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed
>>> by NoRestartBackoffTimeStrategy":
this means no restart strategy is configured, so a failed task is not retried. You can configure a
restart strategy in flink-conf.yaml under the flink conf directory; beyond that, look at which task
actually failed and what error it reported.
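
For reference, a restart strategy can also be set in code instead of flink-conf.yaml; a minimal sketch (the 3 attempts / 10 second delay values are only examples):

import java.util.concurrent.TimeUnit;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class RestartStrategyExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Retry a failed job up to 3 times, waiting 10 seconds between attempts,
        // instead of failing immediately with NoRestartBackoffTimeStrategy.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.of(10, TimeUnit.SECONDS)));
        // ... define sources/operators and call env.execute() as usual.
    }
}

This only controls whether failed tasks are retried; the underlying task error still needs to be found and fixed.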
From: air23
Sent: 2020-05-29 14:34
To: user-zh
Subject: Re: Re: flink1.10 on yarn issue
Here is the Flink program code:
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

public class WordCountStreamingByJava {

    public static void main(String[] args) throws Exception {
        // get the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // read lines from the socket source
        DataStreamSource<String> source = env.socketTextStream("zongteng75", 9001, "\n");
        // split each line into words and count them
        DataStream<WordWithCount> dataStream = source.flatMap(new FlatMapFunction<String, WordWithCount>() {
            @Override
            public void flatMap(String line, Collector<WordWithCount> collector) throws Exception {
                System.out.println(line);
                for (String word : line.split(" ")) {
                    collector.collect(new WordWithCount(word, 1));
                }
            }
        }).keyBy("word")                                   // group by the word field
          .timeWindow(Time.seconds(2), Time.seconds(2))    // 2-second window with a 2-second slide
          .sum("count");                                   // sum the count field per word
        // print the results
        dataStream.print();
        // execute the job
        env.execute("Flink Streaming Word Count By Java");
    }
    // WordWithCount (a POJO with word/count fields) is defined elsewhere and not included in this mail.
}
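
The WordWithCount class the program keys and sums on is not included in the mail; for keyBy("word") and sum("count") to work it presumably looks roughly like the following POJO (field names taken from the code above, everything else is an assumption):

// Hypothetical reconstruction of the POJO the program keys and sums on; not from the original mail.
public class WordWithCount {
    public String word;
    public long count;

    public WordWithCount() {}  // Flink POJOs need a public no-arg constructor

    public WordWithCount(String word, long count) {
        this.word = word;
        this.count = count;
    }

    @Override
    public String toString() {
        return word + " : " + count;
    }
}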
That is the Flink program I'm running; could you take a look? Thanks.
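
One thing worth checking, given the last "Caused by" in the trace quoted further down (Connection refused inside SocketTextStreamFunction.run): socketTextStream only connects as a client, so something must already be listening on zongteng75:9001 and be reachable from the YARN TaskManagers when the job starts. Running nc -lk 9001 on that host is usually enough; as a rough Java sketch of such a line server (the LineServer class and the sample line are made up):

import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class LineServer {
    public static void main(String[] args) throws Exception {
        // Listen on the port the Flink job's socket source connects to.
        try (ServerSocket server = new ServerSocket(9001)) {
            System.out.println("listening on 9001");
            try (Socket client = server.accept();
                 PrintWriter out = new PrintWriter(
                         new OutputStreamWriter(client.getOutputStream(), StandardCharsets.UTF_8), true)) {
                out.println("hello flink word count");  // each line becomes one record in the stream
                Thread.sleep(60_000);                   // keep the connection open while the job reads
            }
        }
    }
}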
On 2020-05-29 14:22:39, "tison" <[email protected]> wrote:
>Could you post the code around execute, i.e. the whole main method, or share it as a gist?
>
>Best,
>tison.
>
>
>tison <[email protected]> wrote on Friday, May 29, 2020 at 2:21 PM:
>
>> It would help to see how the code around env.execute is written; from the error alone it is
>> hard to tell whether the failure happens on the client side or on the cluster side.
>>
>> Best,
>> tison.
>>
>>
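
To illustrate the client/cluster distinction tison raises above: in a program like this, the code around env.execute runs in the JVM of the submitting client, while the operator code (socket source, flatMap, window, sum) runs on the TaskManagers in the cluster, so that is where its failures surface. A rough, self-contained sketch with made-up host and port:

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ClientVsClusterSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        System.out.println("client side: runs before the job is submitted");
        env.socketTextStream("localhost", 9999)                      // source runs on a TaskManager
           .map((MapFunction<String, String>) String::toUpperCase)   // so does this map
           .print();                                                 // sink output goes to TaskManager logs
        env.execute("client-vs-cluster sketch");                     // client submits the job and waits here
        System.out.println("client side: runs after the job finishes");
    }
}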
>> air23 <[email protected]> wrote on Friday, May 29, 2020 at 1:38 PM:
>>
>>> Flink 1.10 deployed on CDH YARN won't run; with version 1.7.2 the same setup works fine.
>>> flink-shaded-hadoop-2-uber-2.6.5-10.0.jar has been added.
>>> The Hadoop environment variable is set: export HADOOP_CONF_DIR=/etc/hadoop/conf
>>> The error is as follows:
>>>
>>> org.apache.flink.client.program.ProgramInvocationException: The main method caused an error: org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: e358699c1be6be1472078771e1fd027f)
>>>     at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:335)
>>>     at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:205)
>>>     at org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:138)
>>>     at org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:662)
>>>     at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:210)
>>>     at org.apache.flink.client.cli.CliFrontend.parseParameters(CliFrontend.java:893)
>>>     at org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:966)
>>>     at java.security.AccessController.doPrivileged(Native Method)
>>>     at javax.security.auth.Subject.doAs(Subject.java:422)
>>>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
>>>     at org.apache.flink.runtime.security.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
>>>     at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:966)
>>> Caused by: java.util.concurrent.ExecutionException: org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: e358699c1be6be1472078771e1fd027f)
>>>     at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
>>>     at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
>>>     at org.apache.flink.streaming.api.environment.StreamContextEnvironment.execute(StreamContextEnvironment.java:83)
>>>     at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1620)
>>>     at tt.WordCountStreamingByJava.main(WordCountStreamingByJava.java:36)
>>>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>>     at java.lang.reflect.Method.invoke(Method.java:498)
>>>     at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:321)
>>>     ... 11 more
>>> Caused by: org.apache.flink.client.program.ProgramInvocationException: Job failed (JobID: e358699c1be6be1472078771e1fd027f)
>>>     at org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:112)
>>>     at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)
>>>     at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
>>>     at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>>>     at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
>>>     at org.apache.flink.client.program.rest.RestClusterClient.lambda$pollResourceAsync$21(RestClusterClient.java:565)
>>>     at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>>>     at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>>>     at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>>>     at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
>>>     at org.apache.flink.runtime.concurrent.FutureUtils.lambda$retryOperationWithDelay$8(FutureUtils.java:291)
>>>     at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
>>>     at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
>>>     at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
>>>     at java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:561)
>>>     at java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:929)
>>>     at java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:442)
>>>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>>>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>>>     at java.lang.Thread.run(Thread.java:748)
>>> Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
>>>     at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
>>>     at org.apache.flink.client.deployment.ClusterClientJobClientAdapter.lambda$null$6(ClusterClientJobClientAdapter.java:110)
>>>     ... 19 more
>>> Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
>>>     at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:110)
>>>     at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:76)
>>>     at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
>>>     at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:186)
>>>     at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:180)
>>>     at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:496)
>>>     at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:380)
>>>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>>     at java.lang.reflect.Method.invoke(Method.java:498)
>>>     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)
>>>     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)
>>>     at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
>>>     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
>>>     at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
>>>     at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
>>>     at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
>>>     at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
>>>     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
>>>     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
>>>     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
>>>     at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
>>>     at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
>>>     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
>>>     at akka.actor.ActorCell.invoke(ActorCell.scala:561)
>>>     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
>>>     at akka.dispatch.Mailbox.run(Mailbox.scala:225)
>>>     at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
>>>     at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>>     at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>>     at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>>     at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>> Caused by: java.net.ConnectException: Connection refused (Connection refused)
>>>     at java.net.PlainSocketImpl.socketConnect(Native Method)
>>>     at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
>>>     at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
>>>     at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
>>>     at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
>>>     at java.net.Socket.connect(Socket.java:606)
>>>     at org.apache.flink.streaming.api.functions.source.SocketTextStreamFunction.run(SocketTextStreamFunction.java:97)
>>>     at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:100)
>>>     at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:63)
>>>     at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:200)
>>
>>