This is an automated email from the ASF dual-hosted git repository.
eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new fca550ca5fe Fix unclear error when pyspark is not installed for JDBC script (#64174)
fca550ca5fe is described below
commit fca550ca5fe08ef99d03843d45af6fffd72b7401
Author: Elad Kalif <[email protected]>
AuthorDate: Tue Mar 24 19:59:48 2026 +0200
Fix unclear error when pyspark is not installed for JDBC script (#64174)
---
.../src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
index 862d678a74f..2ace85961fd 100644
--- a/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
+++ b/providers/apache/spark/src/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
@@ -26,7 +26,12 @@ if TYPE_CHECKING:
try:
from pyspark.sql import SparkSession
except ImportError:
- pass
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
+
+ raise AirflowOptionalProviderFeatureException(
+ "pyspark is required to run spark_jdbc_script. "
+ "Install it with: pip install 'apache-airflow-providers-apache-spark[pyspark]'"
+ )
SPARK_WRITE_TO_JDBC: str = "spark_to_jdbc"
SPARK_READ_FROM_JDBC: str = "jdbc_to_spark"