devanshguptatrepp commented on issue #7261:
URL: https://github.com/apache/hudi/issues/7261#issuecomment-1327297039
Yes, I was able to retrieve these logs for Hudi:
```
2022-11-25T10:10:02,485 WARN
cred.CredentialsLegacyConfigLocationProvider: Found the legacy config profiles
file at [/home/hadoop/.aws/config]. Please move it to the latest default
location [~/.aws/credentials].
2022-11-25T10:12:49,611 WARN cred.CredentialsLegacyConfigLocationProvider:
Found the legacy config profiles file at [/home/hadoop/.aws/config]. Please
move it to the latest default location [~/.aws/credentials].
2022-11-25T10:18:51,075 ERROR metastore.GlueMetastoreClientDelegate: Unable
to get database object:
com.amazonaws.SdkClientException: Unable to execute HTTP request:
readHandshakeRecord
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.handleRetryableException(AmazonHttpClient.java:1216)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1162)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:811)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:779)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:753)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:713)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:695)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:559)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:539)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.doInvoke(AWSGlueClient.java:11444)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.invoke(AWSGlueClient.java:11411)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.invoke(AWSGlueClient.java:11400)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.executeGetDatabase(AWSGlueClient.java:4874)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.getDatabase(AWSGlueClient.java:4843)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.glue.catalog.metastore.GlueMetastoreClientDelegate.getDatabase(GlueMetastoreClientDelegate.java:267)
~[aws-glue-datacatalog-spark-client-3.6.0.jar:?]
at
com.amazonaws.glue.catalog.metastore.AWSCatalogMetastoreClient.getDatabase(AWSCatalogMetastoreClient.java:281)
~[aws-glue-datacatalog-spark-client-3.6.0.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_342]
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2350)
~[hive-metastore-2.3.9-amzn-2.jar:2.3.9-amzn-2]
at com.sun.proxy.$Proxy139.getDatabase(Unknown Source) ~[?:?]
at
org.apache.hudi.hive.HoodieHiveSyncClient.databaseExists(HoodieHiveSyncClient.java:176)
~[?:?]
at
org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:185) ~[?:?]
at org.apache.hudi.hive.HiveSyncTool.doSync(HiveSyncTool.java:153)
~[?:?]
at
org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:141) ~[?:?]
at
org.apache.hudi.sync.common.util.SyncUtilHelpers.runHoodieMetaSync(SyncUtilHelpers.java:56)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2(HoodieSparkSqlWriter.scala:648)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2$adapted(HoodieSparkSqlWriter.scala:647)
~[?:?]
at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
~[scala-library-2.12.15.jar:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:647)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:734)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:338)
~[?:?]
at
org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:183) ~[?:?]
at
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:124)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:860)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:390)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:363)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:239)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
com.trepp.zone.ZoneExecutionHelper.$anonfun$upsert$1(ZoneExecutionHelper.scala:98)
~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at
com.trepp.zone.ZoneExecutionHelper.upsert(ZoneExecutionHelper.scala:92) ~[?:?]
at
com.trepp.zone.Presentation.$anonfun$writeHudiObject$1(Presentation.scala:92)
~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at com.trepp.zone.Presentation.writeHudiObject(Presentation.scala:81)
~[?:?]
at
com.trepp.process.Executor.$anonfun$writeObject$2(Executor.scala:136) ~[?:?]
at
com.trepp.process.Executor.$anonfun$writeObject$2$adapted(Executor.scala:133)
~[?:?]
at scala.collection.immutable.List.foreach(List.scala:431)
~[scala-library-2.12.15.jar:?]
at
com.trepp.process.Executor.$anonfun$writeObject$1(Executor.scala:133) ~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at com.trepp.process.Executor.writeObject(Executor.scala:133) ~[?:?]
at
com.trepp.process.Executor$$anon$2.$anonfun$accept$2(Executor.scala:118) ~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at com.trepp.process.Executor$$anon$2.accept(Executor.scala:113) ~[?:?]
at com.trepp.process.Executor$$anon$2.accept(Executor.scala:111) ~[?:?]
at java.util.TreeMap.forEach(TreeMap.java:1005) ~[?:1.8.0_342]
at com.trepp.process.Executor.executeQuery(Executor.scala:111) ~[?:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$3(EtlImpl.scala:43) ~[?:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1(EtlImpl.scala:37) ~[?:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1$adapted(EtlImpl.scala:23)
~[?:?]
at
scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
~[scala-library-2.12.15.jar:?]
at
scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
~[scala-library-2.12.15.jar:?]
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
~[scala-library-2.12.15.jar:?]
at com.trepp.dataload.EtlImpl.executeProcess(EtlImpl.scala:23) ~[?:?]
at com.trepp.TreppClient$.$anonfun$main$1(TreppClient.scala:46) ~[?:?]
at scala.util.Try$.apply(Try.scala:213) ~[scala-library-2.12.15.jar:?]
at com.trepp.TreppClient$.main(TreppClient.scala:40) ~[?:?]
at com.trepp.TreppClient.main(TreppClient.scala) ~[?:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_342]
at
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
Caused by: javax.net.ssl.SSLException: readHandshakeRecord
at
sun.security.ssl.SSLSocketImpl.readHandshakeRecord(SSLSocketImpl.java:1314)
~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:440)
~[?:1.8.0_342]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.createLayeredSocket(SSLConnectionSocketFactory.java:436)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.connectSocket(SSLConnectionSocketFactory.java:384)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.conn.ssl.SdkTLSSocketFactory.connectSocket(SdkTLSSocketFactory.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:376)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_342]
at
com.amazonaws.http.conn.ClientConnectionManagerFactory$Handler.invoke(ClientConnectionManagerFactory.java:76)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at com.amazonaws.http.conn.$Proxy24.connect(Unknown Source) ~[?:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:393)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1343)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1154)
~[aws-java-sdk-bundle-1.12.170.jar:?]
... 113 more
Suppressed: java.net.SocketException: Broken pipe (Write failed)
at java.net.SocketOutputStream.socketWrite0(Native Method)
~[?:1.8.0_342]
at
java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:111)
~[?:1.8.0_342]
at
java.net.SocketOutputStream.write(SocketOutputStream.java:155) ~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketOutputRecord.encodeAlert(SSLSocketOutputRecord.java:81)
~[?:1.8.0_342]
at
sun.security.ssl.TransportContext.fatal(TransportContext.java:355)
~[?:1.8.0_342]
at
sun.security.ssl.TransportContext.fatal(TransportContext.java:267)
~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:443)
~[?:1.8.0_342]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.createLayeredSocket(SSLConnectionSocketFactory.java:436)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.connectSocket(SSLConnectionSocketFactory.java:384)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.conn.ssl.SdkTLSSocketFactory.connectSocket(SdkTLSSocketFactory.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:376)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498)
~[?:1.8.0_342]
at
com.amazonaws.http.conn.ClientConnectionManagerFactory$Handler.invoke(ClientConnectionManagerFactory.java:76)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at com.amazonaws.http.conn.$Proxy24.connect(Unknown Source)
~[?:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:393)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1343)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1154)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.doExecute(AmazonHttpClient.java:811)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeWithTimer(AmazonHttpClient.java:779)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.execute(AmazonHttpClient.java:753)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.access$500(AmazonHttpClient.java:713)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutionBuilderImpl.execute(AmazonHttpClient.java:695)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:559)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:539)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.doInvoke(AWSGlueClient.java:11444)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.invoke(AWSGlueClient.java:11411)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.invoke(AWSGlueClient.java:11400)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.executeGetDatabase(AWSGlueClient.java:4874)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.services.glue.AWSGlueClient.getDatabase(AWSGlueClient.java:4843)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.glue.catalog.metastore.GlueMetastoreClientDelegate.getDatabase(GlueMetastoreClientDelegate.java:267)
~[aws-glue-datacatalog-spark-client-3.6.0.jar:?]
at
com.amazonaws.glue.catalog.metastore.AWSCatalogMetastoreClient.getDatabase(AWSCatalogMetastoreClient.java:281)
~[aws-glue-datacatalog-spark-client-3.6.0.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498)
~[?:1.8.0_342]
at
org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2350)
~[hive-metastore-2.3.9-amzn-2.jar:2.3.9-amzn-2]
at com.sun.proxy.$Proxy139.getDatabase(Unknown Source) ~[?:?]
at
org.apache.hudi.hive.HoodieHiveSyncClient.databaseExists(HoodieHiveSyncClient.java:176)
~[?:?]
at
org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:185) ~[?:?]
at
org.apache.hudi.hive.HiveSyncTool.doSync(HiveSyncTool.java:153) ~[?:?]
at
org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:141) ~[?:?]
at
org.apache.hudi.sync.common.util.SyncUtilHelpers.runHoodieMetaSync(SyncUtilHelpers.java:56)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2(HoodieSparkSqlWriter.scala:648)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.$anonfun$metaSync$2$adapted(HoodieSparkSqlWriter.scala:647)
~[?:?]
at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
~[scala-library-2.12.15.jar:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:647)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:734)
~[?:?]
at
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:338)
~[?:?]
at
org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:183) ~[?:?]
at
org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591)
~[spark-catalyst_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.execution.QueryExecution.assertCommandExecuted(QueryExecution.scala:124)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:860)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:390)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:363)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:239)
~[spark-sql_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
com.trepp.zone.ZoneExecutionHelper.$anonfun$upsert$1(ZoneExecutionHelper.scala:98)
~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at
com.trepp.zone.ZoneExecutionHelper.upsert(ZoneExecutionHelper.scala:92) ~[?:?]
at
com.trepp.zone.Presentation.$anonfun$writeHudiObject$1(Presentation.scala:92)
~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at
com.trepp.zone.Presentation.writeHudiObject(Presentation.scala:81) ~[?:?]
at
com.trepp.process.Executor.$anonfun$writeObject$2(Executor.scala:136) ~[?:?]
at
com.trepp.process.Executor.$anonfun$writeObject$2$adapted(Executor.scala:133)
~[?:?]
at scala.collection.immutable.List.foreach(List.scala:431)
~[scala-library-2.12.15.jar:?]
at
com.trepp.process.Executor.$anonfun$writeObject$1(Executor.scala:133) ~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at com.trepp.process.Executor.writeObject(Executor.scala:133)
~[?:?]
at
com.trepp.process.Executor$$anon$2.$anonfun$accept$2(Executor.scala:118) ~[?:?]
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
~[scala-library-2.12.15.jar:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at
com.trepp.process.Executor$$anon$2.accept(Executor.scala:113) ~[?:?]
at
com.trepp.process.Executor$$anon$2.accept(Executor.scala:111) ~[?:?]
at java.util.TreeMap.forEach(TreeMap.java:1005) ~[?:1.8.0_342]
at com.trepp.process.Executor.executeQuery(Executor.scala:111)
~[?:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$3(EtlImpl.scala:43) ~[?:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1(EtlImpl.scala:37) ~[?:?]
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1$adapted(EtlImpl.scala:23)
~[?:?]
at
scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
~[scala-library-2.12.15.jar:?]
at
scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
~[scala-library-2.12.15.jar:?]
at
scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
~[scala-library-2.12.15.jar:?]
at com.trepp.dataload.EtlImpl.executeProcess(EtlImpl.scala:23)
~[?:?]
at com.trepp.TreppClient$.$anonfun$main$1(TreppClient.scala:46)
~[?:?]
at scala.util.Try$.apply(Try.scala:213)
~[scala-library-2.12.15.jar:?]
at com.trepp.TreppClient$.main(TreppClient.scala:40) ~[?:?]
at com.trepp.TreppClient.main(TreppClient.scala) ~[?:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498)
~[?:1.8.0_342]
at
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
~[spark-core_2.12-3.3.0-amzn-0.jar:3.3.0-amzn-0]
Caused by: java.net.SocketException: Broken pipe (Write failed)
at java.net.SocketOutputStream.socketWrite0(Native Method)
~[?:1.8.0_342]
at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:111)
~[?:1.8.0_342]
at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketOutputRecord.flush(SSLSocketOutputRecord.java:251)
~[?:1.8.0_342]
at
sun.security.ssl.HandshakeOutStream.flush(HandshakeOutStream.java:89)
~[?:1.8.0_342]
at
sun.security.ssl.Finished$T12FinishedProducer.onProduceFinished(Finished.java:399)
~[?:1.8.0_342]
at
sun.security.ssl.Finished$T12FinishedProducer.produce(Finished.java:374)
~[?:1.8.0_342]
at sun.security.ssl.SSLHandshake.produce(SSLHandshake.java:421)
~[?:1.8.0_342]
at
sun.security.ssl.ServerHelloDone$ServerHelloDoneConsumer.consume(ServerHelloDone.java:182)
~[?:1.8.0_342]
at sun.security.ssl.SSLHandshake.consume(SSLHandshake.java:377)
~[?:1.8.0_342]
at
sun.security.ssl.HandshakeContext.dispatch(HandshakeContext.java:444)
~[?:1.8.0_342]
at
sun.security.ssl.HandshakeContext.dispatch(HandshakeContext.java:422)
~[?:1.8.0_342]
at
sun.security.ssl.TransportContext.dispatch(TransportContext.java:182)
~[?:1.8.0_342]
at sun.security.ssl.SSLTransport.decode(SSLTransport.java:152)
~[?:1.8.0_342]
at sun.security.ssl.SSLSocketImpl.decode(SSLSocketImpl.java:1397)
~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketImpl.readHandshakeRecord(SSLSocketImpl.java:1305)
~[?:1.8.0_342]
at
sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:440)
~[?:1.8.0_342]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.createLayeredSocket(SSLConnectionSocketFactory.java:436)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.conn.ssl.SSLConnectionSocketFactory.connectSocket(SSLConnectionSocketFactory.java:384)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.conn.ssl.SdkTLSSocketFactory.connectSocket(SdkTLSSocketFactory.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:142)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:376)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_342]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_342]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_342]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_342]
at
com.amazonaws.http.conn.ClientConnectionManagerFactory$Handler.invoke(ClientConnectionManagerFactory.java:76)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at com.amazonaws.http.conn.$Proxy24.connect(Unknown Source) ~[?:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:393)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:186)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.thirdparty.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.apache.client.impl.SdkHttpClient.execute(SdkHttpClient.java:72)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeOneRequest(AmazonHttpClient.java:1343)
~[aws-java-sdk-bundle-1.12.170.jar:?]
at
com.amazonaws.http.AmazonHttpClient$RequestExecutor.executeHelper(AmazonHttpClient.java:1154)
~[aws-java-sdk-bundle-1.12.170.jar:?]
... 113 more
2022-11-25T10:20:46,043 ERROR scheduler.AsyncEventQueue: Listener
EventLoggingListener threw an exception
java.io.IOException: All datanodes
[DatanodeInfoWithStorage[10.73.100.62:9866,DS-9ed76e4f-7553-443a-9ba9-7fd0c3f2801b,DISK]]
are bad. Aborting...
at
org.apache.hadoop.hdfs.DataStreamer.handleBadDatanode(DataStreamer.java:1561)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1495)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1481)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1256)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:667)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
2022-11-25T10:20:46,051 ERROR scheduler.AsyncEventQueue: Listener
EventLoggingListener threw an exception
java.io.IOException: All datanodes
[DatanodeInfoWithStorage[10.73.100.62:9866,DS-9ed76e4f-7553-443a-9ba9-7fd0c3f2801b,DISK]]
are bad. Aborting...
at
org.apache.hadoop.hdfs.DataStreamer.handleBadDatanode(DataStreamer.java:1561)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1495)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1481)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at
org.apache.hadoop.hdfs.DataStreamer.processDatanodeOrExternalError(DataStreamer.java:1256)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:667)
~[hadoop-client-api-3.2.1-amzn-8.jar:?]
java.lang.Exception: Could not sync using the meta sync class
org.apache.hudi.hive.HiveSyncTool
at
com.trepp.zone.ZoneExecutionHelper.upsert(ZoneExecutionHelper.scala:101)
at
com.trepp.zone.Presentation.$anonfun$writeHudiObject$1(Presentation.scala:92)
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at scala.util.Try$.apply(Try.scala:213)
at com.trepp.zone.Presentation.writeHudiObject(Presentation.scala:81)
at com.trepp.process.Executor.$anonfun$writeObject$2(Executor.scala:136)
at
com.trepp.process.Executor.$anonfun$writeObject$2$adapted(Executor.scala:133)
at scala.collection.immutable.List.foreach(List.scala:431)
at com.trepp.process.Executor.$anonfun$writeObject$1(Executor.scala:133)
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at scala.util.Try$.apply(Try.scala:213)
at com.trepp.process.Executor.writeObject(Executor.scala:133)
at
com.trepp.process.Executor$$anon$2.$anonfun$accept$2(Executor.scala:118)
at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at scala.util.Try$.apply(Try.scala:213)
at com.trepp.process.Executor$$anon$2.accept(Executor.scala:113)
at com.trepp.process.Executor$$anon$2.accept(Executor.scala:111)
at java.util.TreeMap.forEach(TreeMap.java:1005)
at com.trepp.process.Executor.executeQuery(Executor.scala:111)
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$3(EtlImpl.scala:43)
at scala.util.Try$.apply(Try.scala:213)
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1(EtlImpl.scala:37)
at
com.trepp.dataload.EtlImpl.$anonfun$executeProcess$1$adapted(EtlImpl.scala:23)
at
scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at
scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at com.trepp.dataload.EtlImpl.executeProcess(EtlImpl.scala:23)
at com.trepp.TreppClient$.$anonfun$main$1(TreppClient.scala:46)
at scala.util.Try$.apply(Try.scala:213)
at com.trepp.TreppClient$.main(TreppClient.scala:40)
at com.trepp.TreppClient.main(TreppClient.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
at
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
at
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]