tvalentyn commented on a change in pull request #14738: URL: https://github.com/apache/beam/pull/14738#discussion_r626951681
##########
File path: sdks/python/apache_beam/examples/snippets/snippets.py
##########

@@ -229,41 +216,45 @@ def _add_argparse_args(cls, parser):
   # [END pipeline_options_define_custom]

-  # [START pipeline_options_dataflow_service]
-  import apache_beam as beam
-  from apache_beam.options.pipeline_options import PipelineOptions
-
-  # Create and set your PipelineOptions.
-  # For Cloud execution, specify DataflowRunner and set the Cloud Platform
-  # project, job name, temporary files location, and region.
-  # For more information about regions, check:
-  # https://cloud.google.com/dataflow/docs/concepts/regional-endpoints
-  options = PipelineOptions(
-      flags=argv,
-      runner='DataflowRunner',
-      project='my-project-id',
-      job_name='unique-job-name',
-      temp_location='gs://my-bucket/temp',
-      region='us-central1')
-
-  # Create the Pipeline with the specified options.
-  # with beam.Pipeline(options=options) as pipeline:
-  #   pass  # build your pipeline here.
-  # [END pipeline_options_dataflow_service]
-
-  my_options = options.view_as(MyOptions)
+  @mock.patch('apache_beam.Pipeline')
+  def dataflow_options(mock_pipeline):
+    # [START pipeline_options_dataflow_service]
+    import sys
+
+    import apache_beam as beam
+    from apache_beam.options.pipeline_options import PipelineOptions
+
+    # Create and set your PipelineOptions.
+    # For Cloud execution, specify DataflowRunner and set the Cloud Platform
+    # project, job name, temporary files location, and region.
+    # For more information about regions, check:
+    # https://cloud.google.com/dataflow/docs/concepts/regional-endpoints
+    beam_options = PipelineOptions(
+        flags=sys.argv,

Review comment: same here

--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]
