[ https://issues.apache.org/jira/browse/SPARK-40638?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

test2022123 updated SPARK-40638:
--------------------------------
    Environment: 
mac 12.6

Python 3.8.13

spark-3.3.0-bin-hadoop3

  was:
mac 12.6

Python 3.8.13

spark-3.3.0-bin-hadoop3

 

docker-compose.yml:
{code:java}
version: '3'

services:
  spark-master:
    image: docker.io/bitnami/spark:3.3
    hostname: spark-master
    environment:
      - SPARK_MODE=master
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
      - SPARK_LOCAL_HOSTNAME=spark-master  
    ports:
      - '8080:8080'
      - '7077:7077'
    networks:
      - spark-network
      
      
  spark-worker-1:
    image: docker.io/bitnami/spark:3.3
    hostname: spark-worker-1
    depends_on: 
      - spark-master
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark-master:7077
      - SPARK_WORKER_MEMORY=4g
      - SPARK_WORKER_CORES=8
      - SPARK_WORKER_PORT=6061
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
      - SPARK_LOCAL_HOSTNAME=spark-worker-1
    ports:
      - '14040:4040'
      - '18081:8081'
      - '16061:6061'
    networks:
      - spark-network
      
      
  spark-worker-2:
    image: docker.io/bitnami/spark:3.3
    hostname: spark-worker-2
    depends_on: 
      - spark-worker-1
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark-master:7077
      - SPARK_WORKER_MEMORY=4g
      - SPARK_WORKER_CORES=8
      - SPARK_WORKER_PORT=6062
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
      - SPARK_LOCAL_HOSTNAME=spark-worker-2
    ports:
      - '24040:4040'
      - '28081:8081'    
      - '26062:6062'
    networks:
      - spark-network

networks:
  spark-network: {code}
 

 


> RpcOutboxMessage: Ask terminated before connecting successfully
> ---------------------------------------------------------------
>
>                 Key: SPARK-40638
>                 URL: https://issues.apache.org/jira/browse/SPARK-40638
>             Project: Spark
>          Issue Type: Bug
>          Components: PySpark
>    Affects Versions: 3.3.0
>         Environment: mac 12.6
> Python 3.8.13
> spark-3.3.0-bin-hadoop3
>            Reporter: test2022123
>            Priority: Major
>
> {color:#ff0000}*The submitted PySpark job gets stuck, and the cluster retries executors indefinitely.*{color}
> *The PySpark shell is launched with:*
> {code:java}
> $ PYSPARK_PYTHON=python SPARK_HOME="/Users/mike/Tools/spark-3.3.0-bin-hadoop3" pyspark --master spark://spark-master:7077
> Python 3.8.13 (default, Mar 28 2022, 06:16:26)
> [Clang 12.0.0 ] :: Anaconda, Inc. on darwin
> Type "help", "copyright", "credits" or "license" for more information.
> 22/10/03 10:23:32 WARN Utils: Your hostname, codecan.local resolves to a 
> loopback address: 127.0.0.1; using 192.168.31.31 instead (on interface en5)
> 22/10/03 10:23:32 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to 
> another address
> Setting default log level to "WARN".
> To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use 
> setLogLevel(newLevel).
> 22/10/03 10:23:32 WARN NativeCodeLoader: Unable to load native-hadoop library 
> for your platform... using builtin-java classes where applicable
> Welcome to
>       ____              __
>      / __/__  ___ _____/ /__
>     _\ \/ _ \/ _ `/ __/  '_/
>    /__ / .__/\_,_/_/ /_/\_\   version 3.3.0
>       /_/
> Using Python version 3.8.13 (default, Mar 28 2022 06:16:26)
> Spark context Web UI available at http://192.168.31.31:4040
> Spark context available as 'sc' (master = spark://spark-master:7077, app id = 
> app-20221003022333-0000).
> SparkSession available as 'spark'.
> >>> from pyspark.sql.functions import col
> >>> spark.range(0,5).select(col("id").cast("double")).agg({'id': 'sum'}).show()
> 22/10/03 10:24:24 WARN TaskSchedulerImpl: Initial job has not accepted any 
> resources; check your cluster UI to ensure that workers are registered and 
> have sufficient resources
> 22/10/03 10:24:39 WARN TaskSchedulerImpl: Initial job has not accepted any 
> resources; check your cluster UI to ensure that workers are registered and 
> have sufficient resources
> 22/10/03 10:24:54 WARN TaskSchedulerImpl: Initial job has not accepted any 
> resources; check your cluster UI to ensure that workers are registered and 
> have sufficient resources
> 22/10/03 10:25:09 WARN TaskSchedulerImpl: Initial job has not accepted any 
> resources; check your cluster UI to ensure that workers are registered and 
> have sufficient resources
> 22/10/03 10:25:24 WARN TaskSchedulerImpl: Initial job has not accepted any 
> resources; check your cluster UI to ensure that workers are registered and 
> have sufficient resources {code}
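>  
> A minimal sketch (an assumption, not part of the original report) of the same job with the driver address pinned explicitly, in case the worker containers cannot route back to the host's LAN IP; {{host.docker.internal}} is assumed resolvable from the containers (it is built into Docker Desktop for Mac):
> {code:python}
> # Hypothetical variant of the failing job: pin the driver host and bind
> # address so executors inside the containers can dial back to the driver.
> from pyspark.sql import SparkSession
> from pyspark.sql.functions import col
> 
> spark = (
>     SparkSession.builder
>     .master("spark://spark-master:7077")
>     # assumption: resolvable from inside the worker containers
>     .config("spark.driver.host", "host.docker.internal")
>     .config("spark.driver.bindAddress", "0.0.0.0")
>     .getOrCreate()
> )
> spark.range(0, 5).select(col("id").cast("double")).agg({"id": "sum"}).show()
> {code}
>  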
> *spark-defaults.conf*
> {code:java}
> spark.driver.port 13333
> spark.executor.memory 512m
> spark.executor.cores 1
> spark.executor.instances 2
> spark.cores.max 1
> spark.shuffle.service.enabled false
> spark.dynamicAllocation.enabled false {code}
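>  
> Since {{spark.driver.port}} is pinned to 13333 here, the executors must be able to reach that port on the driver host. A hedged sketch of additional entries (assumptions for debugging, not part of the conf above) that make the driver address explicit:
> {code:java}
> # Hypothetical additions, assuming the driver runs on the Docker host and
> # host.docker.internal resolves from the worker containers:
> spark.driver.host        host.docker.internal
> spark.driver.bindAddress 0.0.0.0
> spark.blockManager.port  13334
> {code}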
>  
> h1. *stderr log page for app-20221003022333-0000/0*
> {code:java}
> Spark Executor Command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "0" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> ========================================
> Using Spark's default log4j profile: 
> org/apache/spark/log4j2-defaults.properties
> Exception in thread "main" java.lang.reflect.UndeclaredThrowableException
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1894)
>       at 
> org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:61)
>       at 
> org.apache.spark.executor.CoarseGrainedExecutorBackend$.run(CoarseGrainedExecutorBackend.scala:424)
>       at 
> org.apache.spark.executor.CoarseGrainedExecutorBackend$.main(CoarseGrainedExecutorBackend.scala:413)
>       at 
> org.apache.spark.executor.CoarseGrainedExecutorBackend.main(CoarseGrainedExecutorBackend.scala)
> Caused by: org.apache.spark.rpc.RpcTimeoutException: Futures timed out after 
> [120 seconds]. This timeout is controlled by spark.rpc.lookupTimeout
>       at 
> org.apache.spark.rpc.RpcTimeout.org$apache$spark$rpc$RpcTimeout$$createRpcTimeoutException(RpcTimeout.scala:47)
>       at 
> org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:62)
>       at 
> org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:58)
>       at 
> scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)
>       at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:76)
>       at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)
>       at 
> org.apache.spark.executor.CoarseGrainedExecutorBackend$.$anonfun$run$9(CoarseGrainedExecutorBackend.scala:444)
>       at 
> scala.runtime.java8.JFunction1$mcVI$sp.apply(JFunction1$mcVI$sp.java:23)
>       at 
> scala.collection.TraversableLike$WithFilter.$anonfun$foreach$1(TraversableLike.scala:985)
>       at scala.collection.immutable.Range.foreach(Range.scala:158)
>       at 
> scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:984)
>       at 
> org.apache.spark.executor.CoarseGrainedExecutorBackend$.$anonfun$run$7(CoarseGrainedExecutorBackend.scala:442)
>       at 
> org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:62)
>       at 
> org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:61)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
>       ... 4 more
> Caused by: java.util.concurrent.TimeoutException: Futures timed out after 
> [120 seconds]
>       at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:259)
>       at 
> scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:263)
>       at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:293)
>       at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
>       ... 16 more {code}
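>  
> The timeout above is the executor failing to set up the driver endpoint ref at {{spark://CoarseGrainedScheduler@192.168.31.31:13333}}. A hypothetical probe (not from the report) to check raw reachability from inside the worker container, e.g. via {{docker-compose exec spark-worker-1 python3 probe.py}}:
> {code:python}
> # probe.py - hypothetical connectivity check for the driver RPC port.
> import socket
> 
> DRIVER = ("192.168.31.31", 13333)  # driver host/port from the executor command
> 
> sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
> sock.settimeout(5)
> try:
>     sock.connect(DRIVER)
>     print("driver port reachable")
> except OSError as exc:
>     print(f"cannot reach driver: {exc}")
> finally:
>     sock.close()
> {code}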
>  
> h1. *[http://spark-worker-1:18081/]* stdout log page for app-20221003022333-0000/0
>  
> {code:java}
> 22/10/03 02:23:35 INFO CoarseGrainedExecutorBackend: Started daemon with 
> process name: 107@spark-worker-1
> 22/10/03 02:23:35 INFO SignalUtils: Registering signal handler for TERM
> 22/10/03 02:23:35 INFO SignalUtils: Registering signal handler for HUP
> 22/10/03 02:23:35 INFO SignalUtils: Registering signal handler for INT
> 22/10/03 02:23:35 WARN NativeCodeLoader: Unable to load native-hadoop library 
> for your platform... using builtin-java classes where applicable
> 22/10/03 02:23:35 INFO SecurityManager: Changing view acls to: spark,mike
> 22/10/03 02:23:35 INFO SecurityManager: Changing modify acls to: spark,mike
> 22/10/03 02:23:35 INFO SecurityManager: Changing view acls groups to: 
> 22/10/03 02:23:35 INFO SecurityManager: Changing modify acls groups to: 
> 22/10/03 02:23:35 INFO SecurityManager: SecurityManager: authentication 
> disabled; ui acls disabled; users  with view permissions: Set(spark, mike); 
> groups with view permissions: Set(); users  with modify permissions: 
> Set(spark, mike); groups with modify permissions: Set()
> 22/10/03 02:25:35 ERROR RpcOutboxMessage: Ask terminated before connecting 
> successfully
> 22/10/03 02:25:35 WARN NettyRpcEnv: Ignored failure: java.io.IOException: 
> Connecting to /192.168.31.31:13333 timed out (120000 ms)
> 22/10/03 02:27:35 ERROR RpcOutboxMessage: Ask terminated before connecting 
> successfully
> 22/10/03 02:27:35 WARN NettyRpcEnv: Ignored failure: java.io.IOException: 
> Connecting to /192.168.31.31:13333 timed out (120000 ms) {code}
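>  
> The errors repeat every two minutes, matching the default 120-second RPC lookup timeout from the stack trace above. A hypothetical tweak (not in the report) to make each attempt fail fast while debugging connectivity:
> {code:java}
> # Assumed debugging aid only: shorten the RPC timeouts so each executor
> # attempt gives up in seconds instead of 120s.
> spark.rpc.lookupTimeout 30s
> spark.network.timeout   60s
> {code}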
>  
> *docker-compose log:*
> {code:java}
> $ docker-compose up
> [+] Running 3/3
>  ⠿ Container spark-spark-master-1    Created    0.0s
>  ⠿ Container spark-spark-worker-1-1  Recreated  0.1s
>  ⠿ Container spark-spark-worker-2-1  Recreated  0.1s
> Attaching to spark-spark-master-1, spark-spark-worker-1-1, 
> spark-spark-worker-2-1
> spark-spark-master-1    |  02:23:19.14
> spark-spark-master-1    |  02:23:19.14 Welcome to the Bitnami spark container
> spark-spark-master-1    |  02:23:19.14 Subscribe to project updates by 
> watching https://github.com/bitnami/containers
> spark-spark-master-1    |  02:23:19.14 Submit issues and feature requests at 
> https://github.com/bitnami/containers/issues
> spark-spark-master-1    |  02:23:19.14
> spark-spark-master-1    |  02:23:19.15 INFO  ==> ** Starting Spark setup **
> spark-spark-master-1    | realpath: /bitnami/spark/conf: No such file or 
> directory
> spark-spark-master-1    |  02:23:19.17 INFO  ==> Detected mounted 
> configuration file...
> spark-spark-master-1    | find: '/docker-entrypoint-initdb.d/': No such file 
> or directory
> spark-spark-master-1    |  02:23:19.17 INFO  ==> No custom scripts in 
> /docker-entrypoint-initdb.d
> spark-spark-master-1    |  02:23:19.18 INFO  ==> ** Spark setup finished! **
> spark-spark-master-1    |
> spark-spark-master-1    |  02:23:19.19 INFO  ==> ** Starting Spark in master 
> mode **
> spark-spark-master-1    | starting org.apache.spark.deploy.master.Master, 
> logging to 
> /opt/bitnami/spark/logs/spark--org.apache.spark.deploy.master.Master-1-spark-master.out
> spark-spark-master-1    | Spark Command: /opt/bitnami/java/bin/java -cp 
> /opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/* -Xmx1g 
> org.apache.spark.deploy.master.Master --host spark-master --port 7077 
> --webui-port 8080
> spark-spark-master-1    | ========================================
> spark-spark-worker-1-1  |  02:23:19.48
> spark-spark-worker-1-1  |  02:23:19.48 Welcome to the Bitnami spark container
> spark-spark-worker-1-1  |  02:23:19.48 Subscribe to project updates by 
> watching https://github.com/bitnami/containers
> spark-spark-worker-1-1  |  02:23:19.48 Submit issues and feature requests at 
> https://github.com/bitnami/containers/issues
> spark-spark-worker-1-1  |  02:23:19.48
> spark-spark-worker-1-1  |  02:23:19.50 INFO  ==> ** Starting Spark setup **
> spark-spark-worker-1-1  | realpath: /bitnami/spark/conf: No such file or 
> directory
> spark-spark-worker-1-1  |  02:23:19.53 INFO  ==> Generating Spark 
> configuration file...
> spark-spark-worker-1-1  | find: '/docker-entrypoint-initdb.d/': No such file 
> or directory
> spark-spark-worker-1-1  |  02:23:19.54 INFO  ==> No custom scripts in 
> /docker-entrypoint-initdb.d
> spark-spark-worker-1-1  |  02:23:19.55 INFO  ==> ** Spark setup finished! **
> spark-spark-worker-1-1  |
> spark-spark-worker-1-1  |  02:23:19.57 INFO  ==> ** Starting Spark in worker 
> mode **
> spark-spark-worker-1-1  | starting org.apache.spark.deploy.worker.Worker, 
> logging to 
> /opt/bitnami/spark/logs/spark--org.apache.spark.deploy.worker.Worker-1-spark-worker-1.out
> spark-spark-worker-1-1  | Spark Command: /opt/bitnami/java/bin/java -cp 
> /opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/* -Xmx1g 
> org.apache.spark.deploy.worker.Worker --webui-port 8081 --port 6061 
> spark://spark-master:7077
> spark-spark-worker-1-1  | ========================================
> spark-spark-worker-2-1  |  02:23:19.98
> spark-spark-worker-2-1  |  02:23:19.98 Welcome to the Bitnami spark container
> spark-spark-worker-2-1  |  02:23:19.98 Subscribe to project updates by 
> watching https://github.com/bitnami/containers
> spark-spark-worker-2-1  |  02:23:19.98 Submit issues and feature requests at 
> https://github.com/bitnami/containers/issues
> spark-spark-worker-2-1  |  02:23:19.99
> spark-spark-worker-2-1  |  02:23:20.00 INFO  ==> ** Starting Spark setup **
> spark-spark-worker-2-1  | realpath: /bitnami/spark/conf: No such file or 
> directory
> spark-spark-worker-2-1  |  02:23:20.03 INFO  ==> Generating Spark 
> configuration file...
> spark-spark-worker-2-1  | find: '/docker-entrypoint-initdb.d/': No such file 
> or directory
> spark-spark-worker-2-1  |  02:23:20.04 INFO  ==> No custom scripts in 
> /docker-entrypoint-initdb.d
> spark-spark-worker-2-1  |  02:23:20.04 INFO  ==> ** Spark setup finished! **
> spark-spark-worker-2-1  |
> spark-spark-worker-2-1  |  02:23:20.06 INFO  ==> ** Starting Spark in worker 
> mode **
> spark-spark-worker-2-1  | starting org.apache.spark.deploy.worker.Worker, 
> logging to 
> /opt/bitnami/spark/logs/spark--org.apache.spark.deploy.worker.Worker-1-spark-worker-2.out
> spark-spark-worker-2-1  | Spark Command: /opt/bitnami/java/bin/java -cp 
> /opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/* -Xmx1g 
> org.apache.spark.deploy.worker.Worker --webui-port 8081 --port 6062 
> spark://spark-master:7077
> spark-spark-worker-2-1  | ========================================
> spark-spark-master-1    | Using Spark's default log4j profile: 
> org/apache/spark/log4j2-defaults.properties
> spark-spark-master-1    | 22/10/03 02:23:20 INFO Master: Started daemon with 
> process name: 41@spark-master
> spark-spark-master-1    | 22/10/03 02:23:20 INFO SignalUtils: Registering 
> signal handler for TERM
> spark-spark-master-1    | 22/10/03 02:23:20 INFO SignalUtils: Registering 
> signal handler for HUP
> spark-spark-master-1    | 22/10/03 02:23:20 INFO SignalUtils: Registering 
> signal handler for INT
> spark-spark-worker-1-1  | Using Spark's default log4j profile: 
> org/apache/spark/log4j2-defaults.properties
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO Worker: Started daemon with 
> process name: 40@spark-worker-1
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for TERM
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for HUP
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for INT
> spark-spark-master-1    | 22/10/03 02:23:21 WARN NativeCodeLoader: Unable to 
> load native-hadoop library for your platform... using builtin-java classes 
> where applicable
> spark-spark-master-1    | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-master-1    | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-master-1    | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-master-1    | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-master-1    | 22/10/03 02:23:21 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-2-1  | Using Spark's default log4j profile: 
> org/apache/spark/log4j2-defaults.properties
> spark-spark-worker-2-1  | 22/10/03 02:23:21 INFO Worker: Started daemon with 
> process name: 40@spark-worker-2
> spark-spark-worker-2-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for TERM
> spark-spark-worker-2-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for HUP
> spark-spark-worker-2-1  | 22/10/03 02:23:21 INFO SignalUtils: Registering 
> signal handler for INT
> spark-spark-worker-1-1  | 22/10/03 02:23:21 WARN NativeCodeLoader: Unable to 
> load native-hadoop library for your platform... using builtin-java classes 
> where applicable
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:23:21 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-master-1    | 22/10/03 02:23:21 INFO Utils: Successfully started 
> service 'sparkMaster' on port 7077.
> spark-spark-master-1    | 22/10/03 02:23:21 INFO Master: Starting Spark 
> master at spark://spark-master:7077
> spark-spark-master-1    | 22/10/03 02:23:21 INFO Master: Running Spark 
> version 3.3.0
> spark-spark-master-1    | 22/10/03 02:23:21 INFO Utils: Successfully started 
> service 'MasterUI' on port 8080.
> spark-spark-master-1    | 22/10/03 02:23:21 INFO MasterWebUI: Bound 
> MasterWebUI to 0.0.0.0, and started at http://spark-master:8080
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Utils: Successfully started 
> service 'sparkWorker' on port 6061.
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Worker 
> decommissioning not enabled.
> spark-spark-worker-2-1  | 22/10/03 02:23:22 WARN NativeCodeLoader: Unable to 
> load native-hadoop library for your platform... using builtin-java classes 
> where applicable
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-master-1    | 22/10/03 02:23:22 INFO Master: I have been elected 
> leader! New state: ALIVE
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Starting Spark 
> worker spark-worker-1:6061 with 8 cores, 4.0 GiB RAM
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Running Spark 
> version 3.3.0
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Spark home: 
> /opt/bitnami/spark
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO ResourceUtils: 
> ==============================================================
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO ResourceUtils: No custom 
> resources configured for spark.worker.
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO ResourceUtils: 
> ==============================================================
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Utils: Successfully started 
> service 'WorkerUI' on port 8081.
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Utils: Successfully started 
> service 'sparkWorker' on port 6062.
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Worker: Worker 
> decommissioning not enabled.
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO WorkerWebUI: Bound 
> WorkerWebUI to 0.0.0.0, and started at http://spark-worker-1:8081
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Connecting to master 
> spark-master:7077...
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO TransportClientFactory: 
> Successfully created connection to spark-master/172.18.0.2:7077 after 27 ms 
> (0 ms spent in bootstraps)
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Worker: Starting Spark 
> worker spark-worker-2:6062 with 8 cores, 4.0 GiB RAM
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Worker: Running Spark 
> version 3.3.0
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Worker: Spark home: 
> /opt/bitnami/spark
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO ResourceUtils: 
> ==============================================================
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO ResourceUtils: No custom 
> resources configured for spark.worker.
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO ResourceUtils: 
> ==============================================================
> spark-spark-master-1    | 22/10/03 02:23:22 INFO Master: Registering worker 
> spark-worker-1:6061 with 8 cores, 4.0 GiB RAM
> spark-spark-worker-1-1  | 22/10/03 02:23:22 INFO Worker: Successfully 
> registered with master spark://spark-master:7077
> spark-spark-worker-2-1  | 22/10/03 02:23:22 INFO Utils: Successfully started 
> service 'WorkerUI' on port 8081.
> spark-spark-worker-2-1  | 22/10/03 02:23:23 INFO WorkerWebUI: Bound 
> WorkerWebUI to 0.0.0.0, and started at http://spark-worker-2:8081
> spark-spark-worker-2-1  | 22/10/03 02:23:23 INFO Worker: Connecting to master 
> spark-master:7077...
> spark-spark-worker-2-1  | 22/10/03 02:23:23 INFO TransportClientFactory: 
> Successfully created connection to spark-master/172.18.0.2:7077 after 29 ms 
> (0 ms spent in bootstraps)
> spark-spark-master-1    | 22/10/03 02:23:23 INFO Master: Registering worker 
> spark-worker-2:6062 with 8 cores, 4.0 GiB RAM
> spark-spark-worker-2-1  | 22/10/03 02:23:23 INFO Worker: Successfully 
> registered with master spark://spark-master:7077
> spark-spark-master-1    | 22/10/03 02:23:33 INFO Master: Registering app 
> PySparkShell
> spark-spark-master-1    | 22/10/03 02:23:33 INFO Master: Registered app 
> PySparkShell with ID app-20221003022333-0000
> spark-spark-master-1    | 22/10/03 02:23:34 INFO Master: Launching executor 
> app-20221003022333-0000/0 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/0 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:23:34 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "0" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO Worker: Executor 
> app-20221003022333-0000/0 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 0
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=0)
> spark-spark-master-1    | 22/10/03 02:27:36 INFO Master: Removing executor 
> app-20221003022333-0000/0 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:27:36 INFO Master: Launching executor 
> app-20221003022333-0000/1 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/1 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:27:36 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "1" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO Worker: Executor 
> app-20221003022333-0000/1 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 1
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=1)
> spark-spark-master-1    | 22/10/03 02:31:37 INFO Master: Removing executor 
> app-20221003022333-0000/1 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:31:37 INFO Master: Launching executor 
> app-20221003022333-0000/2 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/2 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:31:37 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "2" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO Worker: Executor 
> app-20221003022333-0000/2 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 2
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=2)
> spark-spark-master-1    | 22/10/03 02:35:39 INFO Master: Removing executor 
> app-20221003022333-0000/2 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:35:39 INFO Master: Launching executor 
> app-20221003022333-0000/3 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/3 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:35:39 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:35:40 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "3" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO Worker: Executor 
> app-20221003022333-0000/3 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 3
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=3)
> spark-spark-master-1    | 22/10/03 02:39:42 INFO Master: Removing executor 
> app-20221003022333-0000/3 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:39:42 INFO Master: Launching executor 
> app-20221003022333-0000/4 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/4 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:39:42 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "4" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO Worker: Executor 
> app-20221003022333-0000/4 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 4
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=4)
> spark-spark-master-1    | 22/10/03 02:43:44 INFO Master: Removing executor 
> app-20221003022333-0000/4 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:43:44 INFO Master: Launching executor 
> app-20221003022333-0000/5 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/5 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:43:44 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "5" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061"
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO Worker: Executor 
> app-20221003022333-0000/5 finished with state EXITED message Command exited 
> with code 1 exitStatus 1
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO 
> ExternalShuffleBlockResolver: Clean up non-shuffle and non-RDD files 
> associated with the finished executor 5
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO 
> ExternalShuffleBlockResolver: Executor is not registered 
> (appId=app-20221003022333-0000, execId=5)
> spark-spark-master-1    | 22/10/03 02:47:47 INFO Master: Removing executor 
> app-20221003022333-0000/5 because it is EXITED
> spark-spark-master-1    | 22/10/03 02:47:47 INFO Master: Launching executor 
> app-20221003022333-0000/6 on worker worker-20221003022322-spark-worker-1-6061
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO Worker: Asked to launch 
> executor app-20221003022333-0000/6 for PySparkShell
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO SecurityManager: Changing 
> view acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO SecurityManager: Changing 
> modify acls to: spark
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO SecurityManager: Changing 
> view acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO SecurityManager: Changing 
> modify acls groups to:
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO SecurityManager: 
> SecurityManager: authentication disabled; ui acls disabled; users  with view 
> permissions: Set(spark); groups with view permissions: Set(); users  with 
> modify permissions: Set(spark); groups with modify permissions: Set()
> spark-spark-worker-1-1  | 22/10/03 02:47:47 INFO ExecutorRunner: Launch 
> command: "/opt/bitnami/java/bin/java" "-cp" 
> "/opt/bitnami/spark/conf/:/opt/bitnami/spark/jars/*" "-Xmx512M" 
> "-Dspark.driver.port=13333" "-XX:+IgnoreUnrecognizedVMOptions" 
> "--add-opens=java.base/java.lang=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED" 
> "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED" 
> "--add-opens=java.base/java.io=ALL-UNNAMED" 
> "--add-opens=java.base/java.net=ALL-UNNAMED" 
> "--add-opens=java.base/java.nio=ALL-UNNAMED" 
> "--add-opens=java.base/java.util=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED" 
> "--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED" 
> "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED" 
> "--add-opens=java.base/sun.security.action=ALL-UNNAMED" 
> "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED" 
> "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" 
> "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" 
> "spark://[email protected]:13333" "--executor-id" "6" 
> "--hostname" "spark-worker-1" "--cores" "1" "--app-id" 
> "app-20221003022333-0000" "--worker-url" "spark://Worker@spark-worker-1:6061" 
> {code}
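>  
> Each failed executor attempt leaves its own log directory on the worker. A hypothetical way to pull the stderr of a given attempt (path assumed from the default standalone work dir under the reported Spark home {{/opt/bitnami/spark}}):
> {code:java}
> $ docker-compose exec spark-worker-1 \
>     cat /opt/bitnami/spark/work/app-20221003022333-0000/0/stderr
> {code}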
>  
> *network log:*
> {code:java}
> $ ps -ef | grep java
>   501 69079 69073   0 10:23AM ttys000    0:23.56 
> /Users/mike/.sdkman/candidates/java/current/bin/java -cp 
> /Users/mike/Tools/spark-3.3.0-bin-hadoop3/conf/:/Users/mike/Tools/spark-3.3.0-bin-hadoop3/jars/*
>  -Xmx1g -XX:+IgnoreUnrecognizedVMOptions 
> --add-opens=java.base/java.lang=ALL-UNNAMED 
> --add-opens=java.base/java.lang.invoke=ALL-UNNAMED 
> --add-opens=java.base/java.lang.reflect=ALL-UNNAMED 
> --add-opens=java.base/java.io=ALL-UNNAMED 
> --add-opens=java.base/java.net=ALL-UNNAMED 
> --add-opens=java.base/java.nio=ALL-UNNAMED 
> --add-opens=java.base/java.util=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent=ALL-UNNAMED 
> --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.ch=ALL-UNNAMED 
> --add-opens=java.base/sun.nio.cs=ALL-UNNAMED 
> --add-opens=java.base/sun.security.action=ALL-UNNAMED 
> --add-opens=java.base/sun.util.calendar=ALL-UNNAMED 
> --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED 
> org.apache.spark.deploy.SparkSubmit --master spark://spark-master:7077 --name 
> PySparkShell pyspark-shell
>   501 71014 69669   0 10:48AM ttys002    0:00.00 
> grep --color=auto --exclude-dir=.bzr --exclude-dir=CVS --exclude-dir=.git 
> --exclude-dir=.hg --exclude-dir=.svn --exclude-dir=.idea --exclude-dir=.tox 
> java
> $ netstat -anv | grep 69079
> tcp4    0  0  192.168.31.31.65367  *.*              LISTEN       131072 131072  69079  0 0x0100 0x00000006
> tcp4    0  0  127.0.0.1.65366      127.0.0.1.7077   ESTABLISHED  386579 146988  69079  0 0x0102 0x00000008
> tcp46   0  0  *.4040               *.*              LISTEN       131072 131072  69079  0 0x0000 0x00000006
> tcp4    0  0  127.0.0.1.65363      127.0.0.1.65365  ESTABLISHED  407961 146988  69079  0 0x0002 0x00000004
> tcp4    0  0  192.168.31.31.13333  *.*              LISTEN       131072 131072  69079  0 0x0100 0x00000006
> tcp4    0  0  127.0.0.1.65363      127.0.0.1.65364  ESTABLISHED  402840 146988  69079  0 0x0002 0x00000004
> tcp4    0  0  127.0.0.1.65363      *.*              LISTEN       131072 131072  69079  0 0x0100 0x00000006
> kctl    0  0  131072 2048  69079  0  17  6  com.apple.netsrc {code}


