This is an automated email from the ASF dual-hosted git repository.

ibzib pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 5aa5075  [BEAM-10257] Add option defaults for Spark Python tests
     new 323b15f  Merge pull request #12006 from ibzib/BEAM-10257
5aa5075 is described below

commit 5aa5075fceeb0f9deb50454e9b0a0d04cb0644c0
Author: Kyle Weaver <kcwea...@google.com>
AuthorDate: Fri Jun 12 16:08:14 2020 -0700

    [BEAM-10257] Add option defaults for Spark Python tests
---
 .../apache_beam/runners/portability/spark_runner_test.py       | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/sdks/python/apache_beam/runners/portability/spark_runner_test.py b/sdks/python/apache_beam/runners/portability/spark_runner_test.py
index 1627ed6..d0c2c4e 100644
--- a/sdks/python/apache_beam/runners/portability/spark_runner_test.py
+++ b/sdks/python/apache_beam/runners/portability/spark_runner_test.py
@@ -28,6 +28,7 @@ from tempfile import mkdtemp
 
 from apache_beam.options.pipeline_options import DebugOptions
 from apache_beam.options.pipeline_options import PortableOptions
+from apache_beam.runners.portability import job_server
 from apache_beam.runners.portability import portable_runner
 from apache_beam.runners.portability import portable_runner_test
 
@@ -43,8 +44,8 @@ if __name__ == '__main__':
       '--spark_job_server_jar', help='Job server jar to submit jobs.')
   parser.add_argument(
       '--environment_type',
-      default='docker',
-      help='Environment type. docker or process')
+      default='loopback',
+      help='Environment type. docker, process, or loopback')
   parser.add_argument('--environment_config', help='Environment config.')
   parser.add_argument(
       '--environment_cache_millis',
@@ -57,7 +58,10 @@ if __name__ == '__main__':
   known_args, args = parser.parse_known_args(sys.argv)
   sys.argv = args
 
-  spark_job_server_jar = known_args.spark_job_server_jar
+  spark_job_server_jar = (
+      known_args.spark_job_server_jar or
+      job_server.JavaJarJobServer.path_to_beam_jar(
+          'runners:spark:job-server:shadowJar'))
   environment_type = known_args.environment_type.lower()
   environment_config = (
       known_args.environment_config if known_args.environment_config else None)

Reply via email to