This is an automated email from the ASF dual-hosted git repository.

ash pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 48f5149bc78ef6b8fc37e1757abc1943fca6daf1
Author: Jed Cunningham <[email protected]>
AuthorDate: Mon Mar 22 18:55:39 2021 -0600

    Compare string values, not if strings are the same object (#14942)
    
    I found this when investigating why the delete_worker_pods_on_failure flag 
wasn't working. The feature has sufficient test coverage, but doesn't fail 
simply because the strings have the same id when running in the test suite, 
which is exactly what happens in practice.
    
    flake8/pylint also don't seem to raise their respective failures unless one 
side is literally a literal string, even though typing is applied 🤷‍♂️.
    
    I fixed 2 other occurrences I found while I was at it.
    
    (cherry picked from commit 6d30464319216981d10eec1d373646f043fe766c)
---
 airflow/executors/kubernetes_executor.py | 2 +-
 airflow/models/dagrun.py                 | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py
index c42531a..7e3d82b 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/executors/kubernetes_executor.py
@@ -576,7 +576,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
             if self.kube_config.delete_worker_pods:
                 if not self.kube_scheduler:
                     raise AirflowException(NOT_STARTED_MESSAGE)
-                if state is not State.FAILED or self.kube_config.delete_worker_pods_on_failure:
+                if state != State.FAILED or self.kube_config.delete_worker_pods_on_failure:
                     self.kube_scheduler.delete_pod(pod_id, namespace)
                     self.log.info('Deleted pod: %s in namespace %s', str(key), str(namespace))
             try:
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 674d4df..f1c32b1 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -619,7 +619,7 @@ class DagRun(Base, LoggingMixin):
             return
 
         duration = self.end_date - self.start_date
-        if self.state is State.SUCCESS:
+        if self.state == State.SUCCESS:
             Stats.timing(f'dagrun.duration.success.{self.dag_id}', duration)
         elif self.state == State.FAILED:
             Stats.timing(f'dagrun.duration.failed.{self.dag_id}', duration)
@@ -647,7 +647,7 @@ class DagRun(Base, LoggingMixin):
             except AirflowException:
                 if ti.state == State.REMOVED:
                     pass  # ti has already been removed, just ignore it
-                elif self.state is not State.RUNNING and not dag.partial:
+                elif self.state != State.RUNNING and not dag.partial:
                     self.log.warning("Failed to get task '%s' for dag '%s'. Marking it as removed.", ti, dag)
                     Stats.incr(f"task_removed_from_dag.{dag.dag_id}", 1, 1)
                     ti.state = State.REMOVED

Reply via email to