This is an automated email from the ASF dual-hosted git repository.

amoghdesai pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 7e47ead9a5f Migrate apache/spark connection UI metadata to YAML (#62381)
7e47ead9a5f is described below

commit 7e47ead9a5f80fefc274b1781308d1059a0e797c
Author: Anish Giri <[email protected]>
AuthorDate: Thu Feb 26 03:43:56 2026 -0600

    Migrate apache/spark connection UI metadata to YAML (#62381)
---
 providers/apache/spark/provider.yaml               | 82 ++++++++++++++++++++++
 .../providers/apache/spark/get_provider_info.py    | 47 +++++++++++++
 2 files changed, 129 insertions(+)

diff --git a/providers/apache/spark/provider.yaml b/providers/apache/spark/provider.yaml
index 25ecf57df39..f0fda468cd3 100644
--- a/providers/apache/spark/provider.yaml
+++ b/providers/apache/spark/provider.yaml
@@ -111,12 +111,94 @@ hooks:
 connection-types:
  - hook-class-name: airflow.providers.apache.spark.hooks.spark_connect.SparkConnectHook
     connection-type: spark_connect
+    ui-field-behaviour:
+      hidden-fields:
+        - schema
+      relabeling:
+        password: Token
+        login: User ID
+    conn-fields:
+      use_ssl:
+        label: Use SSL
+        schema:
+          type:
+            - boolean
+            - 'null'
+          default: false
  - hook-class-name: airflow.providers.apache.spark.hooks.spark_jdbc.SparkJDBCHook
     connection-type: spark_jdbc
  - hook-class-name: airflow.providers.apache.spark.hooks.spark_sql.SparkSqlHook
     connection-type: spark_sql
+    ui-field-behaviour:
+      hidden-fields:
+        - schema
+        - login
+        - password
+        - extra
+      relabeling: {}
+    conn-fields:
+      queue:
+        label: YARN queue
+        description: Default YARN queue to use
+        schema:
+          type:
+            - string
+            - 'null'
  - hook-class-name: airflow.providers.apache.spark.hooks.spark_submit.SparkSubmitHook
     connection-type: spark
+    ui-field-behaviour:
+      hidden-fields:
+        - schema
+        - login
+        - password
+        - extra
+      relabeling: {}
+      placeholders:
+        keytab: '<base64 encoded Keytab Content>'
+    conn-fields:
+      queue:
+        label: YARN queue
+        description: Default YARN queue to use
+        schema:
+          type:
+            - string
+            - 'null'
+      deploy-mode:
+        label: Deploy mode
+        description: Must be client or cluster
+        schema:
+          type:
+            - string
+            - 'null'
+          default: client
+      spark-binary:
+        label: Spark binary
+        description: 'Must be one of: spark-submit, spark2-submit, spark3-submit'
+        schema:
+          type:
+            - string
+            - 'null'
+          default: spark-submit
+      namespace:
+        label: Kubernetes namespace
+        schema:
+          type:
+            - string
+            - 'null'
+      principal:
+        label: Principal
+        schema:
+          type:
+            - string
+            - 'null'
+      keytab:
+        label: Keytab
+        description: Run the command `base64 <your-keytab-path>` and use its output.
+        schema:
+          type:
+            - string
+            - 'null'
+          format: password
 
 task-decorators:
   - class-name: airflow.providers.apache.spark.decorators.pyspark.pyspark_task
diff --git a/providers/apache/spark/src/airflow/providers/apache/spark/get_provider_info.py b/providers/apache/spark/src/airflow/providers/apache/spark/get_provider_info.py
index 4d96c86f923..bd4169f9f3a 100644
--- a/providers/apache/spark/src/airflow/providers/apache/spark/get_provider_info.py
+++ b/providers/apache/spark/src/airflow/providers/apache/spark/get_provider_info.py
@@ -62,6 +62,13 @@ def get_provider_info():
             {
                 "hook-class-name": 
"airflow.providers.apache.spark.hooks.spark_connect.SparkConnectHook",
                 "connection-type": "spark_connect",
+                "ui-field-behaviour": {
+                    "hidden-fields": ["schema"],
+                    "relabeling": {"password": "Token", "login": "User ID"},
+                },
+                "conn-fields": {
+                    "use_ssl": {"label": "Use SSL", "schema": {"type": 
["boolean", "null"], "default": False}}
+                },
             },
             {
                 "hook-class-name": 
"airflow.providers.apache.spark.hooks.spark_jdbc.SparkJDBCHook",
@@ -70,10 +77,50 @@ def get_provider_info():
             {
                 "hook-class-name": 
"airflow.providers.apache.spark.hooks.spark_sql.SparkSqlHook",
                 "connection-type": "spark_sql",
+                "ui-field-behaviour": {
+                    "hidden-fields": ["schema", "login", "password", "extra"],
+                    "relabeling": {},
+                },
+                "conn-fields": {
+                    "queue": {
+                        "label": "YARN queue",
+                        "description": "Default YARN queue to use",
+                        "schema": {"type": ["string", "null"]},
+                    }
+                },
             },
             {
                 "hook-class-name": 
"airflow.providers.apache.spark.hooks.spark_submit.SparkSubmitHook",
                 "connection-type": "spark",
+                "ui-field-behaviour": {
+                    "hidden-fields": ["schema", "login", "password", "extra"],
+                    "relabeling": {},
+                    "placeholders": {"keytab": "<base64 encoded Keytab 
Content>"},
+                },
+                "conn-fields": {
+                    "queue": {
+                        "label": "YARN queue",
+                        "description": "Default YARN queue to use",
+                        "schema": {"type": ["string", "null"]},
+                    },
+                    "deploy-mode": {
+                        "label": "Deploy mode",
+                        "description": "Must be client or cluster",
+                        "schema": {"type": ["string", "null"], "default": 
"client"},
+                    },
+                    "spark-binary": {
+                        "label": "Spark binary",
+                        "description": "Must be one of: spark-submit, 
spark2-submit, spark3-submit",
+                        "schema": {"type": ["string", "null"], "default": 
"spark-submit"},
+                    },
+                    "namespace": {"label": "Kubernetes namespace", "schema": 
{"type": ["string", "null"]}},
+                    "principal": {"label": "Principal", "schema": {"type": 
["string", "null"]}},
+                    "keytab": {
+                        "label": "Keytab",
+                        "description": "Run the command `base64 
<your-keytab-path>` and use its output.",
+                        "schema": {"type": ["string", "null"], "format": 
"password"},
+                    },
+                },
             },
         ],
         "task-decorators": [

Reply via email to