aaltay commented on a change in pull request #12814:
URL: https://github.com/apache/airflow/pull/12814#discussion_r536302547
##########
File path: airflow/providers/apache/beam/example_dags/example_beam.py
##########
@@ -0,0 +1,262 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Example Airflow DAG for Apache Beam operators
+"""
+import os
+from urllib.parse import urlparse
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import (
+    BeamRunJavaPipelineOperator,
+    BeamRunPythonPipelineOperator,
+)
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
+from airflow.utils.dates import days_ago
+
+GCS_INPUT = os.environ.get('APACHE_BEAM_PYTHON', 'gs://apache-beam-samples/shakespeare/kinglear.txt')
+GCS_TMP = os.environ.get('APACHE_BEAM_GCS_TMP', 'gs://test-dataflow-example/temp/')
+GCS_STAGING = os.environ.get('APACHE_BEAM_GCS_STAGING', 'gs://test-dataflow-example/staging/')
+GCS_OUTPUT = os.environ.get('APACHE_BEAM_GCS_OUTPUT', 'gs://test-dataflow-example/output')

Review comment:
   Curiosity question: Does `gs://test-dataflow-example/` exist as a publicly writable location?
##########
File path: airflow/providers/apache/beam/example_dags/example_beam.py
##########
@@ -0,0 +1,262 @@
+GCS_PYTHON = os.environ.get('APACHE_BEAM_PYTHON', 'gs://test-dataflow-example/wordcount_debugging.py')
+
+GCS_JAR_DIRECT_RUNNER = os.environ.get(
+    'APACHE_BEAM_DIRECT_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-DirectRunner.jar',
+)
+GCS_JAR_DATAFLOW_RUNNER = os.environ.get(
+    'APACHE_BEAM_DATAFLOW_RUNNER_JAR', 'gs://test-dataflow-example/word-count-beam-bundled-0.1.jar'
+)
+GCS_JAR_SPARK_RUNNER = os.environ.get(
+    'APACHE_BEAM_SPARK_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-SparkRunner.jar',
+)
+GCS_JAR_FLINK_RUNNER = os.environ.get(
+    'APACHE_BEAM_FLINK_RUNNER_JAR',
+    'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-FlinkRunner.jar',
+)
+
+GCS_JAR_DIRECT_RUNNER_PARTS = urlparse(GCS_JAR_DIRECT_RUNNER)
+GCS_JAR_DIRECT_RUNNER_BUCKET_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.netloc
+GCS_JAR_DIRECT_RUNNER_OBJECT_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.path[1:]
+GCS_JAR_DATAFLOW_RUNNER_PARTS = urlparse(GCS_JAR_DATAFLOW_RUNNER)
+GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.netloc
+GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.path[1:]
+GCS_JAR_SPARK_RUNNER_PARTS = urlparse(GCS_JAR_SPARK_RUNNER)
+GCS_JAR_SPARK_RUNNER_BUCKET_NAME = GCS_JAR_SPARK_RUNNER_PARTS.netloc
+GCS_JAR_SPARK_RUNNER_OBJECT_NAME = GCS_JAR_SPARK_RUNNER_PARTS.path[1:]
+GCS_JAR_FLINK_RUNNER_PARTS = urlparse(GCS_JAR_FLINK_RUNNER)
+GCS_JAR_FLINK_RUNNER_BUCKET_NAME = GCS_JAR_FLINK_RUNNER_PARTS.netloc
+GCS_JAR_FLINK_RUNNER_OBJECT_NAME = GCS_JAR_FLINK_RUNNER_PARTS.path[1:]
+
+
+default_args = {
+    'default_pipeline_options': {
+        'output': '/tmp/example_beam',
+    },
+    "trigger_rule": "all_done",
+}
+
+
+with models.DAG(
+    "example_beam_native_java_direct_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_direct_runner:
+
+    jar_to_local_direct_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_direct_runner",
+        bucket=GCS_JAR_DIRECT_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_DIRECT_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_job_direct_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_job_direct_runner",
+        runner="DirectRunner",
+        jar="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar",
+        job_name='{{task.task_id}}',
+        pipeline_options={
+            'output': '/tmp/start_java_job_direct_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_direct_runner >> start_java_job_direct_runner
+
+with models.DAG(
+    "example_beam_native_java_dataflow_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_dataflow_runner:
+
+    jar_to_local_dataflow_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_dataflow_runner",
+        bucket=GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_job_dataflow = BeamRunJavaPipelineOperator(
+        task_id="start_java_job_dataflow",
+        runner="DataflowRunner",
+        jar="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar",
+        job_name='{{task.task_id}}',
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_dataflow_runner >> start_java_job_dataflow
+
+with models.DAG(
+    "example_beam_native_java_spark_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_spark_runner:
+
+    jar_to_local_spark_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_spark_runner",
+        bucket=GCS_JAR_SPARK_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_SPARK_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_job_spark_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_job_spark_runner",
+        runner="SparkRunner",
+        jar="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar",
+        job_name='{{task.task_id}}',
+        pipeline_options={
+            'output': '/tmp/start_java_job_spark_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_spark_runner >> start_java_job_spark_runner
+
+with models.DAG(
+    "example_beam_native_java_flink_runner",
+    schedule_interval=None,  # Override to match your needs
+    start_date=days_ago(1),
+    tags=['example'],
+) as dag_native_java_flink_runner:
+
+    jar_to_local_flink_runner = GCSToLocalFilesystemOperator(
+        task_id="jar_to_local_flink_runner",
+        bucket=GCS_JAR_FLINK_RUNNER_BUCKET_NAME,
+        object_name=GCS_JAR_FLINK_RUNNER_OBJECT_NAME,
+        filename="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar",
+    )
+
+    start_java_job_flink_runner = BeamRunJavaPipelineOperator(
+        task_id="start_java_job_flink_runner",
+        runner="FlinkRunner",
+        jar="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar",
+        job_name='{{task.task_id}}',
+        pipeline_options={
+            'output': '/tmp/start_java_job_flink_runner',
+            'inputFile': GCS_INPUT,
+        },
+        job_class='org.apache.beam.examples.WordCount',
+    )
+
+    jar_to_local_flink_runner >> start_java_job_flink_runner
+
+
+with models.DAG(
+    "example_beam_native_python",
+    default_args=default_args,
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
+    tags=['example'],
+) as dag_native_python:
+
+    start_python_job_local_direct_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_local_direct_runner",
+        py_file='apache_beam.examples.wordcount',
+        py_options=['-m'],
+        job_name='{{task.task_id}}',
+        py_requirements=['apache-beam[gcp]==2.21.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_job_direct_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_direct_runner",
+        py_file=GCS_PYTHON,
+        py_options=[],
+        job_name='{{task.task_id}}',
+        py_requirements=['apache-beam[gcp]==2.21.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_job_dataflow_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_dataflow_runner",
+        runner="DataflowRunner",
+        py_file=GCS_PYTHON,
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        py_options=[],
+        job_name='{{task.task_id}}',
+        py_requirements=['apache-beam[gcp]==2.21.0'],

Review comment:
   Use a newer version in examples? 2.25.0 is the latest, 2.26.0 is imminent.
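   If the examples are refreshed, one way to keep the pin from drifting across the many tasks that repeat it is a single module-level constant. A sketch only — the `APACHE_BEAM_VERSION` environment variable is invented for illustration:

```python
import os

# Pin the Beam version once (2.25.0 was the latest release at review time)
# so every example task and the env-var override stay in sync.
BEAM_VERSION = os.environ.get("APACHE_BEAM_VERSION", "2.25.0")
PY_REQUIREMENTS = [f"apache-beam[gcp]=={BEAM_VERSION}"]

# Each operator would then take py_requirements=PY_REQUIREMENTS.
```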
##########
File path: airflow/providers/apache/beam/example_dags/example_beam.py
##########
@@ -0,0 +1,262 @@
+with models.DAG(
+    "example_beam_native_python",
+    default_args=default_args,
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
+    tags=['example'],
+) as dag_native_python:
+
+    start_python_job_local_direct_runner = BeamRunPythonPipelineOperator(

Review comment:
   I think this and start_python_job_direct_runner are a bit duplicative. They more or less do the same exact thing, except for the location of the module to run.
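   For illustration, a sketch of how the two near-identical tasks could be folded together — the loop layout is invented, not part of the PR, and it assumes the operator signature shown in the hunk above:

```python
from airflow import models
from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
from airflow.utils.dates import days_ago

GCS_PYTHON = "gs://test-dataflow-example/wordcount_debugging.py"  # as in the example DAG

with models.DAG("dedup_sketch", start_date=days_ago(1), schedule_interval=None) as dag:
    # The two direct-runner tasks differ only in where the wordcount code lives
    # (an installed module vs. a file on GCS), so one loop can declare both.
    for task_id, py_file, py_options in [
        ("start_python_job_local_direct_runner", "apache_beam.examples.wordcount", ["-m"]),
        ("start_python_job_direct_runner", GCS_PYTHON, []),
    ]:
        BeamRunPythonPipelineOperator(
            task_id=task_id,
            py_file=py_file,
            py_options=py_options,
            job_name="{{task.task_id}}",
            py_requirements=["apache-beam[gcp]==2.21.0"],
            py_interpreter="python3",
            py_system_site_packages=False,
        )
```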
##########
File path: airflow/providers/apache/beam/hooks/beam.py
##########
@@ -0,0 +1,234 @@
+"""This module contains an Apache Beam Hook."""
+import json
+import select
+import shlex
+import subprocess
+import textwrap
+from tempfile import TemporaryDirectory
+from typing import List, Optional
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base_hook import BaseHook
+from airflow.utils.log.logging_mixin import LoggingMixin
+from airflow.utils.python_virtualenv import prepare_virtualenv
+
+
+class _BeamRunner(LoggingMixin):
+    def __init__(
+        self,
+        cmd: List[str],
+    ) -> None:
+        super().__init__()
+        self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
+        self._proc = subprocess.Popen(
+            cmd,
+            shell=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+        )
+
+    def _process_fd(self, fd):
+        """
+        Prints output to logs.
+
+        :param fd: File descriptor.
+        """
+        if fd == self._proc.stderr:
+            while True:
+                line = self._proc.stderr.readline().decode()
+                if not line:
+                    return
+                self.log.warning(line.rstrip("\n"))
+
+        if fd == self._proc.stdout:
+            while True:
+                line = self._proc.stdout.readline().decode()
+                if not line:
+                    return
+                self.log.info(line.rstrip("\n"))
+
+        raise Exception("No data in stderr or in stdout.")
+
+    def wait_for_done(self) -> None:
+        """Waits for Apache Beam pipeline to complete."""
+        self.log.info("Start waiting for Apache Beam process to complete.")
+        reads = [self._proc.stderr, self._proc.stdout]
+        while True:
+            # Wait for at least one available fd.
+            readable_fds, _, _ = select.select(reads, [], [], 5)
+            if readable_fds is None:
+                self.log.info("Waiting for Apache Beam process to complete.")
+                continue
+
+            for readable_fd in readable_fds:
+                self._process_fd(readable_fd)
+
+            if self._proc.poll() is not None:
+                break
+
+        # Corner case: check if more output was created between the last read and the process termination
+        for readable_fd in reads:
+            self._process_fd(readable_fd)
+
+        self.log.info("Process exited with return code: %s", self._proc.returncode)

Review comment:
   Is it possible to capture stdout or stderr? That usually has the real relevant errors.
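   For illustration, one way to act on this: stream stderr to the log while keeping a bounded tail of it, then attach that tail to the failure. A minimal standalone sketch — the helper name is invented, and a real implementation would multiplex stdout and stderr with `select` as the hook above does, to avoid blocking on a full stdout pipe:

```python
import subprocess
from collections import deque


def run_and_capture_stderr_tail(cmd, tail_lines=100):
    """Run cmd, stream its stderr lines as they arrive, and return the exit
    code together with the last ``tail_lines`` lines of stderr."""
    proc = subprocess.Popen(cmd, stderr=subprocess.PIPE, text=True)
    tail = deque(maxlen=tail_lines)
    for line in proc.stderr:  # yields lines until the process closes stderr
        line = line.rstrip("\n")
        print(line)  # stand-in for self.log.warning(line)
        tail.append(line)
    proc.wait()
    return proc.returncode, list(tail)


rc, tail = run_and_capture_stderr_tail(["python", "-c", "import sys; sys.exit('boom')"])
if rc != 0:
    # Surface the captured stderr tail, not just the return code.
    raise RuntimeError(f"Beam process failed ({rc}):\n" + "\n".join(tail))
```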
##########
File path: tests/providers/apache/beam/hooks/test_beam.py
##########
@@ -0,0 +1,233 @@
+import copy
+import unittest
+from unittest import mock
+from unittest.mock import MagicMock
+
+from parameterized import parameterized
+
+from airflow.exceptions import AirflowException
+from airflow.providers.apache.beam.hooks.beam import BeamHook, _BeamRunner
+
+PY_FILE = 'apache_beam.examples.wordcount'
+JAR_FILE = 'unitest.jar'
+JOB_CLASS = 'com.example.UnitTest'
+PY_OPTIONS = ['-m']
+TEST_JOB_ID = 'test-job-id'
+
+DEFAULT_RUNNER = "DirectRunner"
+BEAM_STRING = 'airflow.providers.apache.beam.hooks.beam.{}'
+BEAM_VARIABLES_PY = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}}
+BEAM_VARIABLES_JAVA = {
+    'output': 'gs://test/output',
+    'labels': {'foo': 'bar'},
+}
+
+APACHE_BEAM_V_2_14_0_JAVA_SDK_LOG = f""""\
+Dataflow SDK version: 2.14.0
+Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run
+INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow\
+/jobsDetail/locations/europe-west3/jobs/{TEST_JOB_ID}?project=XXX
+Submitted job: {TEST_JOB_ID}
+Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run
+INFO: To cancel the job using the 'gcloud' tool, run:
+> gcloud dataflow jobs --project=XXX cancel --region=europe-west3 {TEST_JOB_ID}
+"""
+
+
+class TestBeamHook(unittest.TestCase):
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_python_pipeline(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+        )
+
+        expected_cmd = [
+            "python3",
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd)
+        wait_for_done.assert_called_once_with()
+
+    @parameterized.expand(
+        [
+            ('default_to_python3', 'python3'),
+            ('major_version_2', 'python2'),
+            ('major_version_3', 'python3'),
+            ('minor_version', 'python3.6'),
+        ]
+    )
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_python_pipeline_with_custom_interpreter(self, _, py_interpreter, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            py_interpreter=py_interpreter,
+        )
+
+        expected_cmd = [
+            py_interpreter,
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd)
+        wait_for_done.assert_called_once_with()
+
+    @parameterized.expand(
+        [
+            (['foo-bar'], False),
+            (['foo-bar'], True),
+            ([], True),
+        ]
+    )
+    @mock.patch(BEAM_STRING.format('prepare_virtualenv'))
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_python_pipeline_with_non_empty_py_requirements_and_without_system_packages(
+        self, current_py_requirements, current_py_system_site_packages, mock_runner, mock_virtualenv
+    ):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+        mock_virtualenv.return_value = '/dummy_dir/bin/python'
+
+        hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+            variables=copy.deepcopy(BEAM_VARIABLES_PY),
+            py_file=PY_FILE,
+            py_options=PY_OPTIONS,
+            py_requirements=current_py_requirements,
+            py_system_site_packages=current_py_system_site_packages,
+        )
+
+        expected_cmd = [
+            '/dummy_dir/bin/python',
+            '-m',
+            PY_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels=foo=bar',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd)
+        wait_for_done.assert_called_once_with()
+        mock_virtualenv.assert_called_once_with(
+            venv_directory=mock.ANY,
+            python_bin="python3",
+            system_site_packages=current_py_system_site_packages,
+            requirements=current_py_requirements,
+        )
+
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_python_pipeline_with_empty_py_requirements_and_without_system_packages(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+
+        with self.assertRaisesRegex(AirflowException, "Invalid method invocation."):
+            hook.start_python_pipeline(  # pylint: disable=no-value-for-parameter
+                variables=copy.deepcopy(BEAM_VARIABLES_PY),
+                py_file=PY_FILE,
+                py_options=PY_OPTIONS,
+                py_requirements=[],
+            )
+
+        mock_runner.assert_not_called()
+        wait_for_done.assert_not_called()
+
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_java_pipeline(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+
+        hook.start_java_pipeline(  # pylint: disable=no-value-for-parameter
+            jar=JAR_FILE,
+            variables=copy.deepcopy(BEAM_VARIABLES_JAVA),
+        )
+
+        expected_cmd = [
+            'java',
+            '-jar',
+            JAR_FILE,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels={"foo":"bar"}',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd)
+        wait_for_done.assert_called_once_with()
+
+    @mock.patch(BEAM_STRING.format('_BeamRunner'))
+    def test_start_java_pipeline_with_job_class(self, mock_runner):
+        hook = BeamHook(runner=DEFAULT_RUNNER)
+        wait_for_done = mock_runner.return_value.wait_for_done
+
+        hook.start_java_pipeline(  # pylint: disable=no-value-for-parameter
+            jar=JAR_FILE, variables=copy.deepcopy(BEAM_VARIABLES_JAVA), job_class=JOB_CLASS
+        )
+
+        expected_cmd = [
+            'java',
+            '-cp',
+            JAR_FILE,
+            JOB_CLASS,
+            f'--runner={DEFAULT_RUNNER}',
+            '--output=gs://test/output',
+            '--labels={"foo":"bar"}',
+        ]
+        mock_runner.assert_called_once_with(cmd=expected_cmd)
+        wait_for_done.assert_called_once_with()
+
+
+class TestDataflow(unittest.TestCase):

Review comment:
   You can also run a real Beam pipeline with DirectRunner as another test.
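   A sketch of what such a test could look like — the class name and the tiny inline pipeline are invented for illustration, and it assumes `apache_beam` is installed in the test environment:

```python
import unittest

import apache_beam as beam
from apache_beam.testing.util import assert_that, equal_to


class TestBeamDirectRunnerIntegration(unittest.TestCase):
    """Illustrative only: run a trivial pipeline end-to-end on DirectRunner."""

    def test_wordcount_on_direct_runner(self):
        with beam.Pipeline(runner="DirectRunner") as pipeline:
            counts = (
                pipeline
                | beam.Create(["to be or not to be"])
                | beam.FlatMap(str.split)
                | beam.combiners.Count.PerElement()
            )
            # assert_that runs inside the pipeline and fails the test on mismatch.
            assert_that(counts, equal_to([("to", 2), ("be", 2), ("or", 1), ("not", 1)]))
```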
job_class=JOB_CLASS + ) + + expected_cmd = [ + 'java', + '-cp', + JAR_FILE, + JOB_CLASS, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels={"foo":"bar"}', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd) + wait_for_done.assert_called_once_with() + + +class TestDataflow(unittest.TestCase): Review comment: You can also run a real Beam pipeline with DirectRunner as another test. ########## File path: airflow/providers/apache/beam/provider.yaml ########## @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +--- +package-name: apache-airflow-providers-apache-beam +name: Apache Beam +description: | + `Apache Druid <https://druid.apache.org/>`__. Review comment: Apache Beam ... ? ########## File path: airflow/providers/apache/beam/example_dags/example_beam.py ########## @@ -0,0 +1,262 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
##########
File path: airflow/providers/apache/beam/example_dags/example_beam.py
##########
@@ -0,0 +1,262 @@
+with models.DAG(
+    "example_beam_native_python",
+    default_args=default_args,
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
+    tags=['example'],
+) as dag_native_python:
+
+    start_python_job_dataflow_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_dataflow_runner",
+        runner="DataflowRunner",
+        py_file=GCS_PYTHON,
+        pipeline_options={
+            'tempLocation': GCS_TMP,
+            'stagingLocation': GCS_STAGING,
+            'output': GCS_OUTPUT,
+        },
+        py_options=[],
+        job_name='{{task.task_id}}',
+        py_requirements=['apache-beam[gcp]==2.21.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_job_local_spark_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_local_spark_runner",
+        py_file='apache_beam.examples.wordcount',
+        runner="SparkRunner",
+        py_options=['-m'],
+        job_name='{{task.task_id}}',
+        py_requirements=['apache-beam[gcp]==2.21.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_job_local_flink_runner = BeamRunPythonPipelineOperator(
+        task_id="start_python_job_local_flink_runner",
+        py_file='apache_beam.examples.wordcount',
+        runner="FlinkRunner",
+        py_options=['-m'],
+        job_name='{{task.task_id}}',
+        pipeline_options={
+            'output': '/tmp/start_python_job_local_flink_runner',
+        },
+        py_requirements=['apache-beam[gcp]==2.21.0'],
+        py_interpreter='python3',
+        py_system_site_packages=False,
+    )
+
+    start_python_job_local_direct_runner >> start_python_job_local_flink_runner
+    start_python_job_direct_runner >> start_python_job_local_flink_runner

Review comment:
   Are these two lines equivalent to `[start_python_job_local_direct_runner, start_python_job_direct_runner] >> start_python_job_local_flink_runner`?
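   For reference, Airflow's bitshift composition does accept a list on the left-hand side, so the two statements and the list form declare the same edges. A sketch with stand-in tasks (assuming Airflow 2.0's `DummyOperator` import path):

```python
from airflow import models
from airflow.operators.dummy import DummyOperator
from airflow.utils.dates import days_ago

with models.DAG("fan_in_sketch", start_date=days_ago(1), schedule_interval=None) as dag:
    a = DummyOperator(task_id="a")
    b = DummyOperator(task_id="b")
    c = DummyOperator(task_id="c")

    # Two separate statements...
    a >> c
    b >> c
    # ...build the same graph as the single list form
    # (re-declaring an existing edge only logs a warning):
    [a, b] >> c
```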
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]