e-galan commented on code in PR #37969:
URL: https://github.com/apache/airflow/pull/37969#discussion_r1528226597
##########
airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py:
##########
@@ -243,15 +245,110 @@ def run_pipeline_job(
location=region,
failure_policy=failure_policy,
)
+ self._pipeline_job.submit(
+ service_account=service_account,
+ network=network,
+ create_request_timeout=create_request_timeout,
+ experiment=experiment,
+ )
+ self._pipeline_job.wait()
+
+ return self._pipeline_job
+
+ @GoogleBaseHook.fallback_to_default_project_id
+ def submit_pipeline_job(
+ self,
+ project_id: str,
+ region: str,
+ display_name: str,
+ template_path: str,
+ job_id: str | None = None,
+ pipeline_root: str | None = None,
+ parameter_values: dict[str, Any] | None = None,
+ input_artifacts: dict[str, str] | None = None,
+ enable_caching: bool | None = None,
+ encryption_spec_key_name: str | None = None,
+ labels: dict[str, str] | None = None,
+ failure_policy: str | None = None,
+ # START: run param
+ service_account: str | None = None,
+ network: str | None = None,
+ create_request_timeout: float | None = None,
+ experiment: str | experiment_resources.Experiment | None = None,
+ # END: run param
+ ) -> PipelineJob:
+ """
+ Create and start a PipelineJob run.
+ For more info about the client method, please see:
+
https://cloud.google.com/python/docs/reference/aiplatform/latest/google.cloud.aiplatform.PipelineJob#google_cloud_aiplatform_PipelineJob_submit
+
+ :param project_id: Required. The ID of the Google Cloud project that
the service belongs to.
Review Comment:
I agree, it does not seem necessary, but the same parameters and docstrings
are used in the other class methods, so I just decided to stick with it.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]