This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 2b1ae5f811 reorder docstring of `SparkKubernetesOperator` (#41372)
2b1ae5f811 is described below

commit 2b1ae5f811457f95aefb589f9d85dd0eac58267f
Author: Gopal Dirisala <[email protected]>
AuthorDate: Mon Aug 12 10:55:32 2024 +0530

    reorder docstring of `SparkKubernetesOperator` (#41372)
    
    * spark kubernetes operator arguments description reordering
    
    * spark kubernetes operator arguments description reordering
---
 .../cncf/kubernetes/operators/spark_kubernetes.py        | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
index d9c3425f6e..82df0a2ec9 100644
--- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
@@ -48,24 +48,26 @@ class SparkKubernetesOperator(KubernetesPodOperator):
         For more detail about Spark Application Object have a look at the reference:
         
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.3.3-3.1.1/docs/api-docs.md#sparkapplication
 
-    :param application_file: filepath to kubernetes custom_resource_definition of sparkApplication
-    :param kubernetes_conn_id: the connection to Kubernetes cluster
     :param image: Docker image you wish to launch. Defaults to hub.docker.com,
     :param code_path: path to the spark code in image,
     :param namespace: kubernetes namespace to put sparkApplication
-    :param cluster_context: context of the cluster
-    :param application_file: yaml file if passed
+    :param name: name of the pod in which the task will run, will be used (plus a random
+        suffix if random_name_suffix is True) to generate a pod id (DNS-1123 subdomain,
+        containing only [a-z0-9.-]).
+    :param application_file: filepath to kubernetes custom_resource_definition of sparkApplication
+    :param template_spec: kubernetes sparkApplication specification
     :param get_logs: get the stdout of the container as logs of the tasks.
     :param do_xcom_push: If True, the content of the file
         /airflow/xcom/return.json in the container will also be pushed to an
         XCom when the container completes.
     :param success_run_history_limit: Number of past successful runs of the application to keep.
-    :param delete_on_termination: What to do when the pod reaches its final
-        state, or the execution is interrupted. If True (default), delete the
-        pod; if False, leave the pod.
     :param startup_timeout_seconds: timeout in seconds to startup the pod.
     :param log_events_on_failure: Log the pod's events if a failure occurs
     :param reattach_on_restart: if the scheduler dies while the pod is running, reattach and monitor
+    :param delete_on_termination: What to do when the pod reaches its final
+        state, or the execution is interrupted. If True (default), delete the
+        pod; if False, leave the pod.
+    :param kubernetes_conn_id: the connection to Kubernetes cluster
     """
 
     template_fields = ["application_file", "namespace", "template_spec"]

Reply via email to