From 2d566f318dda0c4b409bb1209c83d4e17245d5c4 Mon Sep 17 00:00:00 2001 From: dirrao Date: Sat, 10 Aug 2024 15:51:44 +0530 Subject: [PATCH 1/2] spark kubernetes operator arguments description reordering --- .../cncf/kubernetes/operators/spark_kubernetes.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py index d9c3425f6e143..a36f2b60f8d12 100644 --- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py @@ -48,24 +48,24 @@ class SparkKubernetesOperator(KubernetesPodOperator): For more detail about Spark Application Object have a look at the reference: https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.3.3-3.1.1/docs/api-docs.md#sparkapplication - :param application_file: filepath to kubernetes custom_resource_definition of sparkApplication - :param kubernetes_conn_id: the connection to Kubernetes cluster :param image: Docker image you wish to launch. Defaults to hub.docker.com, :param code_path: path to the spark code in image, :param namespace: kubernetes namespace to put sparkApplication - :param cluster_context: context of the cluster - :param application_file: yaml file if passed + :param application_file: filepath to kubernetes custom_resource_definition of sparkApplication + :param template_spec: kubernetes sparkApplication specification :param get_logs: get the stdout of the container as logs of the tasks. :param do_xcom_push: If True, the content of the file /airflow/xcom/return.json in the container will also be pushed to an XCom when the container completes. :param success_run_history_limit: Number of past successful runs of the application to keep. - :param delete_on_termination: What to do when the pod reaches its final - state, or the execution is interrupted. 
If True (default), delete the - pod; if False, leave the pod. :param startup_timeout_seconds: timeout in seconds to startup the pod. :param log_events_on_failure: Log the pod's events if a failure occurs :param reattach_on_restart: if the scheduler dies while the pod is running, reattach and monitor + :param delete_on_termination: What to do when the pod reaches its final + state, or the execution is interrupted. If True (default), delete the + pod; if False, leave the pod. + :param kubernetes_conn_id: the connection to Kubernetes cluster + :param cluster_context: context of the cluster """ template_fields = ["application_file", "namespace", "template_spec"] From 28d94610e42c0ae6082119d548ccf350d7e18e54 Mon Sep 17 00:00:00 2001 From: dirrao Date: Sun, 11 Aug 2024 10:56:55 +0530 Subject: [PATCH 2/2] spark kubernetes operator arguments description reordering --- .../providers/cncf/kubernetes/operators/spark_kubernetes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py index a36f2b60f8d12..82df0a2ec90c0 100644 --- a/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py +++ b/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py @@ -51,6 +51,9 @@ class SparkKubernetesOperator(KubernetesPodOperator): :param image: Docker image you wish to launch. Defaults to hub.docker.com, :param code_path: path to the spark code in image, :param namespace: kubernetes namespace to put sparkApplication + :param name: name of the pod in which the task will run, will be used (plus a random + suffix if random_name_suffix is True) to generate a pod id (DNS-1123 subdomain, + containing only [a-z0-9.-]).
:param application_file: filepath to kubernetes custom_resource_definition of sparkApplication :param template_spec: kubernetes sparkApplication specification :param get_logs: get the stdout of the container as logs of the tasks. @@ -65,7 +68,6 @@ class SparkKubernetesOperator(KubernetesPodOperator): state, or the execution is interrupted. If True (default), delete the pod; if False, leave the pod. :param kubernetes_conn_id: the connection to Kubernetes cluster - :param cluster_context: context of the cluster """ template_fields = ["application_file", "namespace", "template_spec"]