This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 659150689e6 Fix Databricks provider tests (#48877)
659150689e6 is described below

commit 659150689e631c610ea18485b9986e3ff27f0b36
Author: Kaxil Naik <[email protected]>
AuthorDate: Mon Apr 7 20:24:08 2025 +0530

    Fix Databricks provider tests (#48877)
    
    https://github.com/apache/airflow/pull/46704 added changes but the tests were failing
    
    ```
    =========================== short test summary info ============================
    FAILED providers/databricks/tests/unit/databricks/plugins/test_databricks_workflow.py::test_repair_task_with_params - AssertionError: expected call not found.
    Expected: repair_run({'run_id': 12345, 'rerun_tasks': ['task1', 'task2'], 'overriding_parameters': {'key1': 'value1', 'key2': 'value2'}})
    Actual: repair_run({'run_id': 12345, 'latest_repair_id': 100, 'rerun_tasks': ['task1', 'task2'], 'key1': 'value1', 'key2': 'value2'})
    ====== 1 failed, 6631 passed, 122 skipped, 1 warning in 793.11s (0:13:13) ======
    
    ```
    
    https://github.com/apache/airflow/actions/runs/14309323849/job/40100685431?
---
 .../src/airflow/providers/databricks/plugins/databricks_workflow.py   | 4 +++-
 .../tests/unit/databricks/plugins/test_databricks_workflow.py         | 1 +
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
index 1b6466f7678..16ea7a0b611 100644
--- a/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
+++ b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
@@ -154,9 +154,11 @@ def _repair_task(
         "run_id": databricks_run_id,
         "latest_repair_id": repair_history_id,
         "rerun_tasks": tasks_to_repair,
-        **run_data.get("overriding_parameters", {}),
     }
 
+    if "overriding_parameters" in run_data:
+        repair_json["overriding_parameters"] = run_data["overriding_parameters"]
+
     return hook.repair_run(repair_json)
 
 
diff --git a/providers/databricks/tests/unit/databricks/plugins/test_databricks_workflow.py b/providers/databricks/tests/unit/databricks/plugins/test_databricks_workflow.py
index 638c3afe1cb..3c6f57c1d9d 100644
--- a/providers/databricks/tests/unit/databricks/plugins/test_databricks_workflow.py
+++ b/providers/databricks/tests/unit/databricks/plugins/test_databricks_workflow.py
@@ -114,6 +114,7 @@ def test_repair_task_with_params(mock_databricks_hook):
     expected_payload = {
         "run_id": DATABRICKS_RUN_ID,
         "rerun_tasks": tasks_to_repair,
+        "latest_repair_id": 100,
         "overriding_parameters": {
             "key1": "value1",
             "key2": "value2",

Reply via email to