This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 2584e8585ad Use ValueError instead of RuntimeError when resolving 
SparkSubmitHook connection (#56631)
2584e8585ad is described below

commit 2584e8585adeaf378626138bf6fdc47ce7019516
Author: Ephraim Anierobi <[email protected]>
AuthorDate: Tue Oct 14 18:16:04 2025 +0100

    Use ValueError instead of RuntimeError when resolving SparkSubmitHook 
connection (#56631)
    
    In the future, connection errors will raise RuntimeError instead of
    AirflowException, and in that case, if we also raise RuntimeError for the
    Spark connection, we won't be able to re-raise it cleanly, since we would
    want to catch the RuntimeError coming from the connection itself.
    
    Also, looking at what the code does, ValueError is more appropriate.
---
 .../spark/src/airflow/providers/apache/spark/hooks/spark_submit.py  | 6 +++---
 .../apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git 
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
 
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
index d7c411b172b..eba5292dd2d 100644
--- 
a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
+++ 
b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -279,14 +279,14 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
             if not self.spark_binary:
                 self.spark_binary = extra.get("spark-binary", 
DEFAULT_SPARK_BINARY)
                 if self.spark_binary is not None and self.spark_binary not in 
ALLOWED_SPARK_BINARIES:
-                    raise RuntimeError(
-                        f"The spark-binary extra can be on of 
{ALLOWED_SPARK_BINARIES} and it"
+                    raise ValueError(
+                        f"The spark-binary extra can be one of 
{ALLOWED_SPARK_BINARIES} and it"
                         f" was `{self.spark_binary}`. Please make sure your 
spark binary is one of the"
                         f" allowed ones and that it is available on the PATH"
                     )
             conn_spark_home = extra.get("spark-home")
             if conn_spark_home:
-                raise RuntimeError(
+                raise ValueError(
                     "The `spark-home` extra is not allowed any more. Please 
make sure one of"
                     f" {ALLOWED_SPARK_BINARIES} is available on the PATH, and 
set `spark-binary`"
                     " if needed."
diff --git 
a/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py 
b/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
index 2f6c6d21090..e19a7f73611 100644
--- a/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
+++ b/providers/apache/spark/tests/unit/apache/spark/hooks/test_spark_submit.py
@@ -536,11 +536,11 @@ class TestSparkSubmitHook:
         SparkSubmitHook(conn_id="spark_binary_set", 
spark_binary="another-custom-spark-submit")
 
     def 
test_resolve_connection_spark_binary_extra_not_allowed_runtime_error(self):
-        with pytest.raises(RuntimeError):
+        with pytest.raises(ValueError):
             SparkSubmitHook(conn_id="spark_custom_binary_set")
 
     def test_resolve_connection_spark_home_not_allowed_runtime_error(self):
-        with pytest.raises(RuntimeError):
+        with pytest.raises(ValueError):
             SparkSubmitHook(conn_id="spark_home_set")
 
     def test_resolve_connection_spark_binary_default_value_override(self):

Reply via email to