lucasberlang commented on issue #7185:
URL: https://github.com/apache/hudi/issues/7185#issuecomment-1313294770

   Hi @hussein-awala,
   
   First of all thank you for your answer.
   I have added the dependency to the Dockerfile, and the error has now changed; 
it seems that I am not getting the AWS access keys correctly, although I 
have set them in the properties and in the ConfigMap.
   ```Dockerfile
   ARG FLINK_VERSION
   ARG SCALA_VERSION
   FROM flink:${FLINK_VERSION}-scala_${SCALA_VERSION}
   ARG FLINK_HADOOP_VERSION
   ARG GCS_CONNECTOR_VERSION
   
   RUN test -n "$FLINK_HADOOP_VERSION"
   RUN test -n "$GCS_CONNECTOR_VERSION"
   
   ARG HUDI_HADOOP_JAR_NAME=hudi-flink1.15-bundle-0.12.0.jar
   ARG 
HUDI_HADOOP_JAR_URI=https://repo.maven.apache.org/maven2/org/apache/hudi/hudi-flink1.15-bundle/0.12.0/hudi-flink1.15-bundle-0.12.0.jar
   
   RUN echo "Downloading ${HUDI_HADOOP_JAR_URI}" && \
     wget -q -O /opt/flink/lib/${HUDI_HADOOP_JAR_NAME} ${HUDI_HADOOP_JAR_URI}
   RUN echo "Downloading https://repo1.maven.org/maven2/org/apache/hudi/hudi-aws/0.12.0/hudi-aws-0.12.0.jar" && \
     wget -q -O /opt/flink/lib/hudi-aws-0.12.0.jar https://repo1.maven.org/maven2/org/apache/hudi/hudi-aws/0.12.0/hudi-aws-0.12.0.jar
   
   RUN mkdir -p /opt/flink/plugins/flink-s3-fs-hadoop/ && cp 
/opt/flink/opt/flink-s3-fs-hadoop-1.15.0.jar 
/opt/flink/plugins/flink-s3-fs-hadoop/ && cp 
/opt/flink/opt/flink-s3-fs-hadoop-1.15.0.jar /opt/flink/lib/
   ```
   **Stacktrace**
   
   ```
   org.apache.flink.runtime.rpc.akka.exceptions.AkkaRpcException: Could not 
start RpcEndpoint jobmanager_2.
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:617)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleControlMessage(AkkaRpcActor.java:185)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at scala.PartialFunction.applyOrElse(PartialFunction.scala:123) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.actor.Actor.aroundReceive(Actor.scala:537) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.actor.Actor.aroundReceive$(Actor.scala:535) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.actor.ActorCell.invoke(ActorCell.scala:548) 
~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270) 
[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.dispatch.Mailbox.run(Mailbox.scala:231) 
[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at akka.dispatch.Mailbox.exec(Mailbox.scala:243) 
[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at java.util.concurrent.ForkJoinTask.doExec(Unknown Source) [?:?]
        at java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(Unknown 
Source) [?:?]
        at java.util.concurrent.ForkJoinPool.scan(Unknown Source) [?:?]
        at java.util.concurrent.ForkJoinPool.runWorker(Unknown Source) [?:?]
        at java.util.concurrent.ForkJoinWorkerThread.run(Unknown Source) [?:?]
   Caused by: org.apache.flink.runtime.jobmaster.JobMasterException: Could not 
start the JobMaster.
        at 
org.apache.flink.runtime.jobmaster.JobMaster.onStart(JobMaster.java:390) 
~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.RpcEndpoint.internalCallOnStart(RpcEndpoint.java:181)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.lambda$start$0(AkkaRpcActor.java:612)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:611)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        ... 21 more
   Caused by: org.apache.flink.util.FlinkRuntimeException: Failed to start the 
operator coordinators
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:169)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startAllOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:82)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.SchedulerBase.startScheduling(SchedulerBase.java:624)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startScheduling(JobMaster.java:1010)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startJobExecution(JobMaster.java:927)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.onStart(JobMaster.java:388) 
~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.RpcEndpoint.internalCallOnStart(RpcEndpoint.java:181)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.lambda$start$0(AkkaRpcActor.java:612)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:611)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        ... 21 more
   Caused by: org.apache.hudi.exception.HoodieIOException: Failed to get 
instance of org.apache.hadoop.fs.FileSystem
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:109) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:100) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.util.StreamerUtil.tableExists(StreamerUtil.java:338) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:307) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:294) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.sink.StreamWriteOperatorCoordinator.start(StreamWriteOperatorCoordinator.java:179)
 ~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.flink.runtime.operators.coordination.OperatorCoordinatorHolder.start(OperatorCoordinatorHolder.java:194)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:164)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startAllOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:82)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.SchedulerBase.startScheduling(SchedulerBase.java:624)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startScheduling(JobMaster.java:1010)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startJobExecution(JobMaster.java:927)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.onStart(JobMaster.java:388) 
~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.RpcEndpoint.internalCallOnStart(RpcEndpoint.java:181)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.lambda$start$0(AkkaRpcActor.java:612)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:611)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        ... 21 more
   Caused by: java.nio.file.AccessDeniedException: 
euw1-bluetab-general-data-pro: 
org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException: No AWS Credentials 
provided by SimpleAWSCredentialsProvider EnvironmentVariableCredentialsProvider 
InstanceProfileCredentialsProvider : com.amazonaws.SdkClientException: The 
requested metadata is not found at 
http://169.254.169.254/latest/meta-data/iam/security-credentials/
        at 
org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:187) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.once(Invoker.java:111) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.lambda$retry$3(Invoker.java:265) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:322) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:261) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:236) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.s3a.S3AFileSystem.verifyBucketExists(S3AFileSystem.java:391)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.s3a.S3AFileSystem.initialize(S3AFileSystem.java:322) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3375) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:125) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3424) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3392) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:485) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.Path.getFileSystem(Path.java:365) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:107) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:100) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.util.StreamerUtil.tableExists(StreamerUtil.java:338) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:307) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:294) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.sink.StreamWriteOperatorCoordinator.start(StreamWriteOperatorCoordinator.java:179)
 ~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.flink.runtime.operators.coordination.OperatorCoordinatorHolder.start(OperatorCoordinatorHolder.java:194)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:164)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startAllOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:82)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.SchedulerBase.startScheduling(SchedulerBase.java:624)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startScheduling(JobMaster.java:1010)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startJobExecution(JobMaster.java:927)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.onStart(JobMaster.java:388) 
~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.RpcEndpoint.internalCallOnStart(RpcEndpoint.java:181)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.lambda$start$0(AkkaRpcActor.java:612)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:611)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        ... 21 more
   Caused by: org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException: No AWS 
Credentials provided by SimpleAWSCredentialsProvider 
EnvironmentVariableCredentialsProvider InstanceProfileCredentialsProvider : 
com.amazonaws.SdkClientException: The requested metadata is not found at 
http://169.254.169.254/latest/meta-data/iam/security-credentials/
        at 
org.apache.hadoop.fs.s3a.AWSCredentialProviderList.getCredentials(AWSCredentialProviderList.java:159)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.getCredentialsFromContext(AmazonHttpClient.java:1257)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.runBeforeRequestHandlers(AmazonHttpClient.java:833)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:783)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:770)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:744)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:704)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:686)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:550) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:530) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5259) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.getBucketRegionViaHeadRequest(AmazonS3Client.java:6220)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.fetchRegionFromCache(AmazonS3Client.java:6193)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5244) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:5206) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.headBucket(AmazonS3Client.java:1438) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
com.amazonaws.services.s3.AmazonS3Client.doesBucketExist(AmazonS3Client.java:1374)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.s3a.S3AFileSystem.lambda$verifyBucketExists$1(S3AFileSystem.java:392)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.once(Invoker.java:109) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.lambda$retry$3(Invoker.java:265) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retryUntranslated(Invoker.java:322) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:261) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.s3a.Invoker.retry(Invoker.java:236) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.s3a.S3AFileSystem.verifyBucketExists(S3AFileSystem.java:391)
 ~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.s3a.S3AFileSystem.initialize(S3AFileSystem.java:322) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3375) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:125) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at 
org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3424) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3392) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:485) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hadoop.fs.Path.getFileSystem(Path.java:365) 
~[flink-s3-fs-hadoop-1.15.0.jar:1.15.0]
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:107) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.common.fs.FSUtils.getFs(FSUtils.java:100) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at org.apache.hudi.util.StreamerUtil.tableExists(StreamerUtil.java:338) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:307) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.util.StreamerUtil.initTableIfNotExists(StreamerUtil.java:294) 
~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.hudi.sink.StreamWriteOperatorCoordinator.start(StreamWriteOperatorCoordinator.java:179)
 ~[hudi-flink1.15-bundle-0.12.0.jar:0.12.0]
        at 
org.apache.flink.runtime.operators.coordination.OperatorCoordinatorHolder.start(OperatorCoordinatorHolder.java:194)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:164)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.DefaultOperatorCoordinatorHandler.startAllOperatorCoordinators(DefaultOperatorCoordinatorHandler.java:82)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.scheduler.SchedulerBase.startScheduling(SchedulerBase.java:624)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startScheduling(JobMaster.java:1010)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.startJobExecution(JobMaster.java:927)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.jobmaster.JobMaster.onStart(JobMaster.java:388) 
~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.RpcEndpoint.internalCallOnStart(RpcEndpoint.java:181)
 ~[flink-dist-1.15.0.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.lambda$start$0(AkkaRpcActor.java:612)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:68)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor$StoppedState.start(AkkaRpcActor.java:611)
 ~[flink-rpc-akka_d357944d-1e58-45e1-a223-4f7011ceaa4f.jar:1.15.0]
        ... 21 more
   ```
   Any idea why this could be happening?
   
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to