appunni-dishq edited a comment on issue #9860:
URL: https://github.com/apache/airflow/issues/9860#issuecomment-696833576


   ```
   from airflow import DAG
   from airflow.contrib.operators.spark_submit_operator import \
       SparkSubmitOperator
   from datetime import datetime, timedelta
   from airflow.models import Variable
   from airflow.contrib.operators.kubernetes_pod_operator import \
       KubernetesPodOperator
   from airflow.hooks.base_hook import BaseHook
   from airflow.contrib.operators.slack_webhook_operator import \
       SlackWebhookOperator
   from airflow.contrib.kubernetes.secret import Secret
   from kubernetes.client.models import V1VolumeMount as VolumeMount, \
       V1Volume as Volume, V1SecretVolumeSource
   
   
   conf = {}  # base Spark conf entries trimmed from this snippet
   conf["spark.kubernetes.authenticate.submission.oauthToken"] = \
       Variable.get("TOKEN")
   conf["spark.kubernetes.authenticate.submission.caCertFile"] = \
       "/mnt/secrets/ca.crt"
   
   gke_crt_volume = Volume(
       name="spark-crt",
       secret=V1SecretVolumeSource(secret_name="spark-cert")
   )
   
   gke_crt_volume_mount = VolumeMount(
       name="spark-crt",
       mount_path="/mnt/secrets",
       read_only=True
   )
   
   executor_config = {
       "KubernetesExecutor": {
           "request_memory": "500Mi",
           "limit_memory": "500Mi",
           "limit_cpu": "200m",
           "request_cpu": "200m",
           "node_selectors": {
               "env": "production"
           },
           "volumes": [gke_crt_volume],
           "volume_mounts": [gke_crt_volume_mount]
       }
   }
   default_args = {
       'owner': 'appunni',
       'depends_on_past': False,
       'email_on_failure': False,
       'email_on_retry': False,
       'start_date': datetime(2019, 7, 17),
       'retries': 2,
       'retry_delay': timedelta(seconds=60),
       'on_failure_callback': task_fail_slack_alert,    # Slack callbacks defined
       'on_success_callback': on_success_slack_alert,   # elsewhere; sketch below
       'schedule_interval': None,  # ignored in default_args; set on the DAG below
       "executor_config": executor_config,
       'pool': 'engineering'
   }
   
   dag = DAG(dag_id='spark_tasks',  catchup=False,
             default_args=default_args, schedule_interval=None)
   
   # `name`, `file`, `envs` and `conn` are set per job elsewhere in the file;
   # shown here with the surrounding loop stripped out.
   SparkSubmitOperator(
       task_id=name.replace("-", "_"),
       application="local:///main/{file}".format(file=file),
       name=name.replace("_", "-"),
       dag=dag,
       env_vars=envs,
       conf=conf,
       executor_config=executor_config,
       conn_id=conn,
       jars="local:///spark/app-assembly.jar",
   )
   ```
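
   The Slack callbacks referenced in `default_args` are defined elsewhere in
   the same file; a rough sketch of the pattern they follow (the connection id
   `slack_webhook` here is a placeholder, not the real one):

   ```
   def task_fail_slack_alert(context):
       # The webhook token lives in the password field of an Airflow connection.
       slack_conn = BaseHook.get_connection("slack_webhook")
       return SlackWebhookOperator(
           task_id="slack_fail_alert",
           http_conn_id="slack_webhook",
           webhook_token=slack_conn.password,
           message="Failed: {}".format(context["task_instance"].task_id),
       ).execute(context=context)
   ```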
   
   This is roughly what it looks like. I tried to include as many details as possible; let me know if you need more @dimberman 
   
   

