[ 
https://issues.apache.org/jira/browse/BEAM-1251?focusedWorklogId=120519&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-120519
 ]

ASF GitHub Bot logged work on BEAM-1251:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 08/Jul/18 21:32
            Start Date: 08/Jul/18 21:32
    Worklog Time Spent: 10m 
      Work Description: charlesccychen closed pull request #5842: [BEAM-1251] 
Modernize Python 2 code to get ready for Python 3
URL: https://github.com/apache/beam/pull/5842
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/.test-infra/jenkins/dependency_check/bigquery_client_utils.py 
b/.test-infra/jenkins/dependency_check/bigquery_client_utils.py
index 08571b7dce3..f7cd0fe24b9 100644
--- a/.test-infra/jenkins/dependency_check/bigquery_client_utils.py
+++ b/.test-infra/jenkins/dependency_check/bigquery_client_utils.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -101,7 +102,7 @@ def insert_dep_to_table(self, dep, version, release_date, 
is_currently_used=Fals
     try:
       query_job = self.bigquery_client.query(query)
       if not query_job.done():
-        print query_job.result()
+        print(query_job.result())
     except:
       raise
 
@@ -123,7 +124,7 @@ def delete_dep_from_table(self, dep, version):
     try:
       query_job = self.bigquery_client.query(query)
       if not query_job.done():
-        print query_job.result()
+        print(query_job.result())
     except:
       raise
 
diff --git 
a/.test-infra/jenkins/dependency_check/dependency_check_report_generator.py 
b/.test-infra/jenkins/dependency_check/dependency_check_report_generator.py
index 1703375e695..981d9eb4b05 100644
--- a/.test-infra/jenkins/dependency_check/dependency_check_report_generator.py
+++ b/.test-infra/jenkins/dependency_check/dependency_check_report_generator.py
@@ -56,7 +56,7 @@ def extract_results(file_path):
           see_oudated_deps = True
     raw_report.close()
     return outdated_deps
-  except Exception, e:
+  except Exception as e:
     raise
 
 
@@ -266,7 +266,7 @@ def generate_report(file_path, sdk_type, project_id, 
dataset_id, table_id):
     for dep in high_priority_deps:
       report.write("%s" % dep)
     report.write("</table>\n")
-  except Exception, e:
+  except Exception as e:
     report.write('<p> {0} </p>'.format(str(e)))
 
   report.close()
diff --git 
a/.test-infra/jenkins/dependency_check/dependency_check_report_generator_test.py
 
b/.test-infra/jenkins/dependency_check/dependency_check_report_generator_test.py
index 3a48f2a7230..770527a04c6 100644
--- 
a/.test-infra/jenkins/dependency_check/dependency_check_report_generator_test.py
+++ 
b/.test-infra/jenkins/dependency_check/dependency_check_report_generator_test.py
@@ -19,6 +19,7 @@
 #   This script performs testing of scenarios from 
verify_performance_test_results.py
 #
 
+from __future__ import print_function
 import unittest, mock
 from mock import patch
 from datetime import datetime
@@ -39,7 +40,7 @@ class DependencyCheckReportGeneratorTest(unittest.TestCase):
   """Tests for `dependency_check_report_generator.py`."""
 
   def setUp(self):
-    print "Test name:", self._testMethodName
+    print("Test name:", self._testMethodName)
 
 
   @patch('google.cloud.bigquery.Client')
diff --git a/sdks/python/apache_beam/runners/worker/sdk_worker.py 
b/sdks/python/apache_beam/runners/worker/sdk_worker.py
index b8fa422536b..0ebcea5bacc 100644
--- a/sdks/python/apache_beam/runners/worker/sdk_worker.py
+++ b/sdks/python/apache_beam/runners/worker/sdk_worker.py
@@ -176,7 +176,7 @@ def _request_process_bundle_progress(self, request):
     def task():
       instruction_reference = getattr(
           request, request.WhichOneof('request')).instruction_reference
-      if self._instruction_id_vs_worker.has_key(instruction_reference):
+      if instruction_reference in self._instruction_id_vs_worker:
         self._execute(
             lambda: self._instruction_id_vs_worker[
                 instruction_reference
diff --git a/sdks/python/apache_beam/runners/worker/sdk_worker_main.py 
b/sdks/python/apache_beam/runners/worker/sdk_worker_main.py
index cbd28568343..3e2353b2592 100644
--- a/sdks/python/apache_beam/runners/worker/sdk_worker_main.py
+++ b/sdks/python/apache_beam/runners/worker/sdk_worker_main.py
@@ -158,10 +158,10 @@ def _get_worker_count(pipeline_options):
     an int containing the worker_threads to use. Default is 1
   """
   pipeline_options = pipeline_options.get(
-      'options') if pipeline_options.has_key('options') else {}
+      'options') if 'options' in pipeline_options else {}
   experiments = pipeline_options.get(
       'experiments'
-  ) if pipeline_options and pipeline_options.has_key('experiments') else []
+  ) if pipeline_options and 'experiments' in pipeline_options else []
 
   experiments = experiments if experiments else []
 
diff --git a/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py 
b/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
index 0a71292f773..f2ca4e79f72 100644
--- a/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
+++ b/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
@@ -39,8 +39,7 @@ class WorkerIdInterceptor(grpc.StreamStreamClientInterceptor):
   # and throw exception in worker_id_interceptor.py after we have rolled out
   # the corresponding container changes.
   # Unique worker Id for this worker.
-  _worker_id = os.environ['WORKER_ID'] if os.environ.has_key(
-      'WORKER_ID') else str(uuid.uuid4())
+  _worker_id = os.environ.get('WORKER_ID', str(uuid.uuid4()))
 
   def __init__(self):
     pass
diff --git a/sdks/python/apache_beam/tools/map_fn_microbenchmark.py 
b/sdks/python/apache_beam/tools/map_fn_microbenchmark.py
index e640c2ba960..116c28e853e 100644
--- a/sdks/python/apache_beam/tools/map_fn_microbenchmark.py
+++ b/sdks/python/apache_beam/tools/map_fn_microbenchmark.py
@@ -56,7 +56,7 @@ def run_benchmark(num_maps=100, num_runs=10, 
num_elements_step=1000):
     print("%6d element%s %g sec" % (
         num_elements, " " if num_elements == 1 else "s", 
timings[num_elements]))
 
-  print
+  print()
   # pylint: disable=unused-variable
   gradient, intercept, r_value, p_value, std_err = stats.linregress(
       *list(zip(*list(timings.items()))))
diff --git a/website/.jenkins/append_index_html_to_internal_links.py 
b/website/.jenkins/append_index_html_to_internal_links.py
index 14702abd489..a3d1cf53c13 100644
--- a/website/.jenkins/append_index_html_to_internal_links.py
+++ b/website/.jenkins/append_index_html_to_internal_links.py
@@ -29,12 +29,18 @@
   'sudo apt-get install python-beautifulsoup4'.
 
 """
+from __future__ import print_function
 
 import fnmatch
 import os
 import re
 from bs4 import BeautifulSoup
 
+try:
+    unicode           # pylint: disable=unicode-builtin
+except NameError:
+    unicode = str
+
 # Original link match. Matches any string which starts with '/' and doesn't
 # have a file extension.
 linkMatch = r'^\/(.*\.(?!([^\/]+)$))?[^.]*$'
@@ -56,10 +62,10 @@
     if 'javadoc' not in root:
       matches.append(os.path.join(root, filename))
 
-print 'Matches: ' + str(len(matches))
+print('Matches: ' + str(len(matches)))
 # Iterates over each matched file looking for link matches.
 for match in matches:
-  print 'Fixing links in: ' + match
+  print('Fixing links in: ' + match)
   mf = open(match)
   soup = BeautifulSoup(mf)
   # Iterates over every <a>
@@ -86,7 +92,7 @@
         html = unicode(soup).encode('utf-8')
         # Write back to the file.
         with open(match, "wb") as f:
-          print 'Replacing ' + hr + ' with: ' + a['href']
+          print('Replacing ' + hr + ' with: ' + a['href'])
           f.write(html)
     except KeyError as e:
       # Some <a> tags don't have an href.


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


Issue Time Tracking
-------------------

    Worklog Id:     (was: 120519)
    Time Spent: 13.5h  (was: 13h 20m)

> Python 3 Support
> ----------------
>
>                 Key: BEAM-1251
>                 URL: https://issues.apache.org/jira/browse/BEAM-1251
>             Project: Beam
>          Issue Type: Improvement
>          Components: sdk-py-core
>            Reporter: Eyad Sibai
>            Assignee: Robbe
>            Priority: Trivial
>          Time Spent: 13.5h
>  Remaining Estimate: 0h
>
> I have been trying to use Google Datalab with Python 3. As I see it, there are 
> several packages that Google Datalab depends on which do not support 
> Python 3 yet. This is one of them.
> https://github.com/GoogleCloudPlatform/DataflowPythonSDK/issues/6



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to