GitHub user rcrchawla added a comment to the discussion: Airflow task failed but spark kube app is running

Please find the Airflow config below:

[celery]
flower_url_prefix =
worker_concurrency = 16

[celery_kubernetes_executor]
kubernetes_queue = kubernetes

[core]
auth_manager = airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager
colored_console_log = False
dags_folder = /opt/airflow/dags/repo/prod/dags
execution_api_server_url = http://airflow-api-server:8080/execution/
executor = CeleryExecutor
load_examples = False
remote_logging = False

[dag_processor]
bundle_refresh_check_interval = 43200
min_file_process_interval = 43200
print_stats_interval = 43200
stale_dag_threshold = 86400

[database]
sql_alchemy_engine_args = {"pool_pre_ping": true}

[scheduler]
run_duration = 41460
standalone_dag_processor = True
statsd_host = airflow-statsd
statsd_on = True
statsd_port = 9125
statsd_prefix = airflow
task_instance_heartbeat_timeout = 900
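
For reference, the effective values can be read back at runtime through Airflow's config API, to confirm what the scheduler and workers actually loaded. A minimal sketch (section and key names taken from the config above; assumes this airflow.cfg is the one deployed in the running pods):

# Minimal sketch: confirm the effective config on a scheduler or worker.
from airflow.configuration import conf

print(conf.get("core", "executor"))                                 # CeleryExecutor
print(conf.getint("scheduler", "task_instance_heartbeat_timeout"))  # 900
print(conf.get("core", "dags_folder"))

The same check works from the CLI, e.g. airflow config get-value scheduler task_instance_heartbeat_timeout.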

Thanks

GitHub link: 
https://github.com/apache/airflow/discussions/63298#discussioncomment-16073872
