lucas481516 commented on issue #26394: URL: https://github.com/apache/airflow/issues/26394#issuecomment-1258420530
Hello @eladkal, I work with @bertrand-buffat; let me add some details so that the current problem is described as precisely as possible. Kubernetes cluster version: 1.21. AWS platform version: eks.10 (https://docs.aws.amazon.com/eks/latest/userguide/platform-versions.html#:~:text=April%204%2C%202022-,Kubernetes%20version%201.21,-The%20following%20admission) (example of a DAG that works on 2.2.2 and fails on version 2.4.0). Thanks for your help 🙂 `### def DataKubeDag from kubernetes.client import models as k8s import copy from airflow.models import DAG from config import CONFIG class DataKubeDAG(DAG): def __init__(self, dag_id, default_args, tags=[], **kwargs): self.log.info(f"Loading dag: {dag_id}") full_tags += [default_args["owner"]] full_tags += tags super().__init__(dag_id=dag_id, default_args=default_args, tags=full_tags, **kwargs) def add_task(self, task): custom_annotations = k8s.V1ObjectMeta( annotations={"ad.datadoghq.com/tags": f'{{"airflow_dag_id": "{self.dag_id}", "airflow_task_id": "{task.task_id}"}}'} ) if task.executor_config.get("pod_override") is None: task.executor_config = {"pod_override": k8s.V1Pod()} task.executor_config["pod_override"].metadata = custom_annotations super().add_task(task) return ### end def DataKubeDAG ### def obtain_default_args from kubernetes.client import models as k8s from datetime import timedelta from airflow.utils.dates import days_ago # Recreate this dict every time a DAG needs to get default_args def obtain_default_args(owner, retries: int = 6): return { "owner": "me", "depends_on_past": False, "email": "[email protected]", "email_on_failure": False, "email_on_retry": False, "retries": retries, "retry_delay": timedelta(minutes=5), "start_date": days_ago(1), "catchup": False, "execution_timeout": timedelta(seconds=86400), "priority_weight": 1, "executor_config": {"pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(annotations={}))}, } ### end def obtain_default_args ### start dag liveness who failed from 
airflow.operators.dummy_operator import DummyOperator from functions.default_args import obtain_default_args from plugins.models.airflow import DataKubeDAG default_args = obtain_default_args() default_args["retries"] = 0 with DataKubeDAG( dag_id="sys_liveness", schedule_interval="*/5 * * * *", default_args=default_args, catchup=False, ) as dag: start_task = DummyOperator(task_id="liveness") ### end dag liveness` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
