This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 1839b683e7 Allow downloading requirements file from GCS in
`BeamRunPythonPipelineOperator` (#31645)
1839b683e7 is described below
commit 1839b683e7974f7f553af2689a64ee36cf1ec006
Author: Peng Yu <[email protected]>
AuthorDate: Sun Aug 6 11:17:04 2023 -0400
Allow downloading requirements file from GCS in
`BeamRunPythonPipelineOperator` (#31645)
* Update beam python run operator to download requirements if needed
---------
Co-authored-by: Hussein Awala <[email protected]>
Co-authored-by: eladkal <[email protected]>
Co-authored-by: Peng Yu <[email protected]>
---
airflow/providers/apache/beam/operators/beam.py | 7 ++++++-
tests/providers/apache/beam/operators/test_beam.py | 17 +++++++++++++----
2 files changed, 19 insertions(+), 5 deletions(-)
diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py
index cc7d217abb..c39ab916f7 100644
--- a/airflow/providers/apache/beam/operators/beam.py
+++ b/airflow/providers/apache/beam/operators/beam.py
@@ -309,10 +309,15 @@ class BeamRunPythonPipelineOperator(BeamBasePipelineOperator):
def execute_sync(self, context: Context):
with ExitStack() as exit_stack:
+ gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
if self.py_file.lower().startswith("gs://"):
- gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.py_file))
self.py_file = tmp_gcs_file.name
+ if self.snake_case_pipeline_options.get("requirements_file", "").startswith("gs://"):
+ tmp_req_file = exit_stack.enter_context(
+ gcs_hook.provide_file(object_url=self.snake_case_pipeline_options["requirements_file"])
+ )
+ self.snake_case_pipeline_options["requirements_file"] = tmp_req_file.name
if self.is_dataflow and self.dataflow_hook:
with self.dataflow_hook.provide_authorized_gcloud():
diff --git a/tests/providers/apache/beam/operators/test_beam.py b/tests/providers/apache/beam/operators/test_beam.py
index c5949dd868..d67b3ff647 100644
--- a/tests/providers/apache/beam/operators/test_beam.py
+++ b/tests/providers/apache/beam/operators/test_beam.py
@@ -39,6 +39,7 @@ JOB_ID = "test-dataflow-pipeline-id"
JAR_FILE = "gs://my-bucket/example/test.jar"
JOB_CLASS = "com.test.NotMain"
PY_FILE = "gs://my-bucket/my-object.py"
+REQURIEMENTS_FILE = "gs://my-bucket/my-requirements.txt"
PY_INTERPRETER = "python3"
PY_OPTIONS = ["-m"]
GO_FILE = "gs://my-bucket/example/main.go"
@@ -48,6 +49,10 @@ DEFAULT_OPTIONS = {
"project": "test",
"stagingLocation": "gs://test/staging",
}
+PY_DEFAULT_OPTIONS = {
+ **DEFAULT_OPTIONS,
+ "requirements_file": REQURIEMENTS_FILE,
+}
ADDITIONAL_OPTIONS = {"output": "gs://test/output", "labels": {"foo": "bar"}}
TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
EXPECTED_ADDITIONAL_OPTIONS = {
@@ -63,7 +68,7 @@ class TestBeamRunPythonPipelineOperator:
task_id=TASK_ID,
py_file=PY_FILE,
py_options=PY_OPTIONS,
- default_pipeline_options=DEFAULT_OPTIONS,
+ default_pipeline_options=PY_DEFAULT_OPTIONS,
pipeline_options=ADDITIONAL_OPTIONS,
)
@@ -74,7 +79,7 @@ class TestBeamRunPythonPipelineOperator:
assert self.operator.runner == DEFAULT_RUNNER
assert self.operator.py_options == PY_OPTIONS
assert self.operator.py_interpreter == PY_INTERPRETER
- assert self.operator.default_pipeline_options == DEFAULT_OPTIONS
+ assert self.operator.default_pipeline_options == PY_DEFAULT_OPTIONS
assert self.operator.pipeline_options == EXPECTED_ADDITIONAL_OPTIONS
@mock.patch("airflow.providers.apache.beam.operators.beam.BeamHook")
@@ -92,8 +97,10 @@ class TestBeamRunPythonPipelineOperator:
"staging_location": "gs://test/staging",
"output": "gs://test/output",
"labels": {"foo": "bar", "airflow-version": TEST_VERSION},
+ "requirements_file": gcs_provide_file.return_value.__enter__.return_value.name,
}
- gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
+ gcs_provide_file.assert_any_call(object_url=PY_FILE)
+ gcs_provide_file.assert_any_call(object_url=REQURIEMENTS_FILE)
start_python_hook.assert_called_once_with(
variables=expected_options,
py_file=gcs_provide_file.return_value.__enter__.return_value.name,
@@ -134,8 +141,10 @@ class TestBeamRunPythonPipelineOperator:
"labels": {"foo": "bar", "airflow-version": TEST_VERSION},
"region": "us-central1",
"impersonate_service_account": TEST_IMPERSONATION_ACCOUNT,
+ "requirements_file": gcs_provide_file.return_value.__enter__.return_value.name,
}
- gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
+ gcs_provide_file.assert_any_call(object_url=PY_FILE)
+ gcs_provide_file.assert_any_call(object_url=REQURIEMENTS_FILE)
persist_link_mock.assert_called_once_with(
self.operator,
None,