This is an automated email from the ASF dual-hosted git repository.

altay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 3fafec1  [BEAM-5320] [BEAM-6106] Finish Python 3 porting for testing module (#7262)
3fafec1 is described below

commit 3fafec15d88cd774e581243fa52f90370586e633
Author: Robbe Sneyders <robbe.sneyd...@gmail.com>
AuthorDate: Thu Dec 13 01:42:49 2018 +0100

    [BEAM-5320] [BEAM-6106] Finish Python 3 porting for testing module (#7262)
    
    * Port testing module to Python 3
---
 .../apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py    | 2 +-
 sdks/python/apache_beam/testing/synthetic_pipeline.py              | 2 +-
 sdks/python/apache_beam/testing/synthetic_pipeline_test.py         | 2 +-
 sdks/python/apache_beam/testing/test_utils.py                      | 7 +++++--
 sdks/python/tox.ini                                                | 2 +-
 5 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py b/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py
index 21e9c48..f7eb93b 100644
--- a/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py
+++ b/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py
@@ -41,7 +41,7 @@ class BigqueryTornadoesIT(unittest.TestCase):
 
   # The default checksum is a SHA-1 hash generated from sorted rows reading
   # from expected Bigquery table.
-  DEFAULT_CHECKSUM = '83789a7c1bca7959dcf23d3bc37e9204e594330f'
+  DEFAULT_CHECKSUM = 'd860e636050c559a16a791aff40d6ad809d4daf0'
 
   @attr('IT')
   def test_bigquery_tornadoes_it(self):
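
The new checksum value accompanies the compute_hash change in test_utils.py further down in this commit, which presumably produces this digest from the expected BigQuery rows. As a rough illustration only (the rows below are hypothetical placeholders, not the real tornadoes data), a checksum of this style can be reproduced like so:

    from apache_beam.testing.test_utils import compute_hash

    # Hypothetical "month,tornado_count" rows; the real test derives them
    # from the BigQuery output table.
    rows = ['2011,6', '2012,7']

    # SHA-1 (the default algorithm, per the comment above) over the sorted,
    # UTF-8-encoded row strings.
    print(compute_hash(rows))  # 40-character hex digest
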
diff --git a/sdks/python/apache_beam/testing/synthetic_pipeline.py b/sdks/python/apache_beam/testing/synthetic_pipeline.py
index 2f8980b..b560f35 100644
--- a/sdks/python/apache_beam/testing/synthetic_pipeline.py
+++ b/sdks/python/apache_beam/testing/synthetic_pipeline.py
@@ -71,7 +71,7 @@ def div_round_up(a, b):
 def rotate_key(element):
   """Returns a new key-value pair of the same size but with a different key."""
   (key, value) = element
-  return key[-1] + key[:-1], value
+  return key[-1:] + key[:-1], value
 
 
 class SyntheticStep(beam.DoFn):
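
The added colon is the whole fix: indexing a bytes object in Python 3 returns an int, so key[-1] + key[:-1] raises a TypeError for byte-string keys, while the one-element slice key[-1:] is a bytes value on both Python 2 and 3. A minimal sketch, assuming the synthetic source produces byte-string keys:

    key = b'abcd'

    # Python 3: key[-1] is the int 100 (ord of 'd'), and int + bytes raises
    # TypeError. key[-1:] is b'd' on both Python 2 and 3, so this works.
    rotated = key[-1:] + key[:-1]
    assert rotated == b'dabc'
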
diff --git a/sdks/python/apache_beam/testing/synthetic_pipeline_test.py b/sdks/python/apache_beam/testing/synthetic_pipeline_test.py
index fe5e94a..e786553 100644
--- a/sdks/python/apache_beam/testing/synthetic_pipeline_test.py
+++ b/sdks/python/apache_beam/testing/synthetic_pipeline_test.py
@@ -125,7 +125,7 @@ class SyntheticPipelineTest(unittest.TestCase):
     if writes_output:
       read_output = []
       for file_name in glob.glob(output_location + '*'):
-        with open(file_name, 'r') as f:
+        with open(file_name, 'rb') as f:
           read_output.extend(f.read().splitlines())
 
       self.assertEqual(10, len(read_output))
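
Opening the output files in binary mode keeps the comparison types consistent under Python 3: in text mode f.read() returns str (decoding the file, typically as UTF-8), so splitlines() would yield text rather than the byte strings the test presumably expects, and non-UTF-8 content would fail to decode at all. A small illustration with a hypothetical file of raw bytes:

    import tempfile

    # Hypothetical output file holding raw, non-UTF-8 bytes.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'\xff\xfe\x01\n\x02\x03\n')
        path = f.name

    # Binary mode: bytes in, bytes out.
    with open(path, 'rb') as f:
        lines = f.read().splitlines()    # [b'\xff\xfe\x01', b'\x02\x03']
    assert all(isinstance(line, bytes) for line in lines)

    # open(path, 'r') on Python 3 would raise UnicodeDecodeError when this
    # content is read; Python 2 would have silently returned a str.
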
diff --git a/sdks/python/apache_beam/testing/test_utils.py b/sdks/python/apache_beam/testing/test_utils.py
index 1f0e99e..f9aa128 100644
--- a/sdks/python/apache_beam/testing/test_utils.py
+++ b/sdks/python/apache_beam/testing/test_utils.py
@@ -75,11 +75,14 @@ class TempDir(object):
 
 
 def compute_hash(content, hashing_alg=DEFAULT_HASHING_ALG):
-  """Compute a hash value from a list of string."""
+  """Compute a hash value of a list of objects by hashing their string
+  representations."""
+  content = [str(x).encode('utf-8') if not isinstance(x, bytes) else x
+             for x in content]
   content.sort()
   m = hashlib.new(hashing_alg)
   for elem in content:
-    m.update(str(elem).encode('utf-8'))
+    m.update(elem)
   return m.hexdigest()
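
Encoding every element to bytes before sorting keeps compute_hash usable on Python 3: sorting a list that mixes str and bytes raises a TypeError there, and calling str() on a bytes value would otherwise hash the repr-like text "b'...'" instead of the raw bytes. A short sketch of the failure mode the rewrite avoids:

    content = [b'beta', 'alpha']

    # Python 3: '<' is not supported between str and bytes, so this raises.
    try:
        sorted(content)
    except TypeError:
        pass

    # Normalizing first, as compute_hash now does, makes the sort well
    # defined and hashes raw bytes rather than strings like "b'beta'".
    normalized = [x if isinstance(x, bytes) else str(x).encode('utf-8')
                  for x in content]
    assert sorted(normalized) == [b'alpha', b'beta']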
 
 
diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini
index a357a2f..2818870 100644
--- a/sdks/python/tox.ini
+++ b/sdks/python/tox.ini
@@ -58,7 +58,7 @@ setenv =
   BEAM_EXPERIMENTAL_PY3=1
   RUN_SKIPPED_PY3_TESTS=0
 modules =
-  apache_beam.typehints,apache_beam.coders,apache_beam.options,apache_beam.tools,apache_beam.utils,apache_beam.internal,apache_beam.metrics,apache_beam.portability,apache_beam.pipeline_test,apache_beam.pvalue_test,apache_beam.runners,apache_beam.io.hadoopfilesystem_test,apache_beam.io.hdfs_integration_test,apache_beam.io.gcp.tests.utils_test,apache_beam.io.gcp.big_query_query_to_table_it_test,apache_beam.io.gcp.bigquery_io_read_it_test,apache_beam.io.gcp.bigquery_test,apache_beam.io.gcp. [...]
+  apache_beam.typehints,apache_beam.coders,apache_beam.options,apache_beam.tools,apache_beam.utils,apache_beam.internal,apache_beam.metrics,apache_beam.portability,apache_beam.pipeline_test,apache_beam.pvalue_test,apache_beam.runners,apache_beam.io.hadoopfilesystem_test,apache_beam.io.hdfs_integration_test,apache_beam.io.gcp.tests.utils_test,apache_beam.io.gcp.big_query_query_to_table_it_test,apache_beam.io.gcp.bigquery_io_read_it_test,apache_beam.io.gcp.bigquery_test,apache_beam.io.gcp. [...]
 commands =
   python --version
   pip --version
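
The modules value is a comma-separated list of test packages run by the Python 3 tox environments; the actual change here falls inside the portion truncated by the mail archive, presumably adding the newly ported testing modules. Purely as an illustration of how such a list could drive a test runner, and not Beam's actual tox wiring, a hedged sketch:

    import unittest

    # Hypothetical, shortened modules string; the real value lives in tox.ini.
    modules = 'apache_beam.typehints,apache_beam.coders,apache_beam.options'

    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for name in modules.split(','):
        # loadTestsFromName imports each dotted name and collects the tests
        # defined in that module or package.
        suite.addTests(loader.loadTestsFromName(name.strip()))

    unittest.TextTestRunner(verbosity=1).run(suite)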
