Ferdinanddb commented on issue #58228:
URL: https://github.com/apache/airflow/issues/58228#issuecomment-3548702373

   @SameerMesiah97 Here is an example (with some redacted values in <>), but I 
don't think it has anything to do with the issue:
   
   ```yaml
   apiVersion: sparkoperator.k8s.io/v1beta2
   kind: SparkApplication
   metadata:
     name: <job_name>
     namespace: spark-operator
   spec:
     type: Python
     pythonVersion: "3"
     mode: cluster
     image: "<spark_image>"
     imagePullPolicy: IfNotPresent
     mainApplicationFile: "local:///app/main.py"
     # Block style instead of a flow sequence: diffs cleanly and matches
     # the rest of the manifest.
     arguments:
       - "--some"
       - "options"
     sparkVersion: "4.0.1"
     sparkConf:
       spark.jars.ivy: "/tmp/.ivy2.5.2"

       spark.log.level: "WARN"

       spark.eventLog.enabled: "true"
       spark.eventLog.dir: "gs://<some_bucket>/spark-events/"
       spark.hadoop.fs.gs.impl: "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem"

       spark.hadoop.fs.gs.auth.type: "APPLICATION_DEFAULT"

       spark.hadoop.fs.gs.auth.service.account.enable: "false"
       spark.hadoop.fs.gs.project.id: "<some_project>"

       spark.sql.extensions: "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
       spark.sql.catalog.spark_catalog: "org.apache.iceberg.spark.SparkSessionCatalog"

       spark.sql.execution.arrow.pyspark.enabled: "true"
       spark.sql.decimalOperations.allowPrecisionLoss: "false"
       spark.sql.ansi.enabled: "true"
       spark.sql.shuffle.partitions: "2000"
       spark.sql.adaptive.enabled: "true"
       spark.serializer: "org.apache.spark.serializer.KryoSerializer"
       spark.io.compression.codec: "zstd"
       # Key left plain (unquoted) to match every sibling key in sparkConf.
       spark.driver.maxResultSize: "0"

       spark.local.dir: "/mnt/spark-local"

     driver:
       nodeSelector:
         environment: prod
         app: spark-driver
         node-role: scale-to-zero
       tolerations:
         - key: "dedicated"
           operator: "Equal"
           value: "scale-to-zero"
           effect: "NoSchedule"
       env:
         - name: SPARK_K8S
           value: "true"
       cores: 3
       memory: 8g
       memoryOverhead: 3g
       serviceAccount: spark
       securityContext:
         capabilities:
           drop:
             - ALL
         runAsGroup: 185
         runAsUser: 185
         runAsNonRoot: true
         allowPrivilegeEscalation: false
         seccompProfile:
           type: RuntimeDefault
     executor:
       nodeSelector:
         environment: prod
         app: compute-highmem-plus
         node-role: scale-to-zero
       tolerations:
         - key: "dedicated"
           operator: "Equal"
           value: "scale-to-zero"
           effect: "NoSchedule"
       env:
         - name: SPARK_K8S
           value: "true"
       # NOTE(review): a fixed instance count is typically superseded by
       # dynamicAllocation when it is enabled below — confirm intent.
       instances: 4
       cores: 4
       memory: 28g
       memoryOverhead: 8g
       volumeMounts:
         - name: spark-local-storage
           mountPath: /mnt/spark-local
       # NOTE(review): the SparkApplication CRD declares volumes at the spec
       # level (spec.volumes), not per-pod — verify this field is honored here.
       volumes:
         - name: spark-local-storage
           emptyDir: {}
       securityContext:
         capabilities:
           drop:
             - ALL
         runAsGroup: 185
         runAsUser: 185
         runAsNonRoot: true
         allowPrivilegeEscalation: false
         seccompProfile:
           type: RuntimeDefault

     dynamicAllocation:
       enabled: true
       initialExecutors: 4
       maxExecutors: 180
       minExecutors: 1
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to