Repository: cassandra-dtest
Updated Branches:
  refs/heads/mck/fix-codestyle-violations [created] 557d01231


WIP – Fix flake8 violations so that Travis and CircleCI pass


Project: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/commit/557d0123
Tree: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/tree/557d0123
Diff: http://git-wip-us.apache.org/repos/asf/cassandra-dtest/diff/557d0123

Branch: refs/heads/mck/fix-codestyle-violations
Commit: 557d01231dbecce94d312f4e61c258292f239e79
Parents: 0fd9a55
Author: Mick Semb Wever <m...@apache.org>
Authored: Wed May 16 18:35:11 2018 +1000
Committer: Mick Semb Wever <m...@apache.org>
Committed: Sat May 19 09:11:06 2018 +1000

----------------------------------------------------------------------
 cqlsh_tests/cqlsh_copy_tests.py                | 41 ++++++++++-----------
 cqlsh_tests/cqlsh_tools.py                     |  5 ---
 upgrade_tests/cql_tests.py                     | 16 +++++---
 upgrade_tests/paging_test.py                   |  2 +-
 upgrade_tests/storage_engine_upgrade_test.py   |  8 ++--
 upgrade_tests/thrift_upgrade_test.py           | 39 +++++++++++---------
 upgrade_tests/upgrade_base.py                  |  4 +-
 upgrade_tests/upgrade_compact_storage.py       | 16 ++++----
 upgrade_tests/upgrade_supercolumns_test.py     | 16 ++------
 upgrade_tests/upgrade_through_versions_test.py | 22 ++++++-----
 10 files changed, 83 insertions(+), 86 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/cqlsh_tests/cqlsh_copy_tests.py
----------------------------------------------------------------------
diff --git a/cqlsh_tests/cqlsh_copy_tests.py b/cqlsh_tests/cqlsh_copy_tests.py
index 9769fd7..0812b1b 100644
--- a/cqlsh_tests/cqlsh_copy_tests.py
+++ b/cqlsh_tests/cqlsh_copy_tests.py
@@ -25,8 +25,8 @@ from cassandra.util import SortedSet
 from ccmlib.common import is_win
 
 from .cqlsh_tools import (DummyColorMap, assert_csvs_items_equal, csv_rows,
-                         monkeypatch_driver, random_list, unmonkeypatch_driver,
-                         write_rows_to_csv)
+                          monkeypatch_driver, random_list, 
unmonkeypatch_driver,
+                          write_rows_to_csv)
 from dtest import (Tester, create_ks)
 from tools.data import rows_to_list
 from tools.metadata_wrapper import (UpdatingClusterMetadataWrapper,
@@ -386,7 +386,7 @@ class TestCqlshCopy(Tester):
         except Exception as e:
             if len(csv_results) != len(processed_results):
                 logger.warning("Different # of entries. CSV: " + 
str(len(csv_results)) +
-                        " vs results: " + str(len(processed_results)))
+                               " vs results: " + str(len(processed_results)))
             elif csv_results[0] is not None:
                 for x in range(0, len(csv_results[0])):
                     if csv_results[0][x] != processed_results[0][x]:
@@ -398,7 +398,6 @@ class TestCqlshCopy(Tester):
     def make_csv_formatter(self, time_format, nullval):
         with self._cqlshlib() as cqlshlib:  # noqa
             from cqlshlib.formatting import format_value, format_value_default
-            from cqlshlib.displaying import NO_COLOR_MAP
         try:
             from cqlshlib.formatting import DateTimeFormat
             date_time_format = DateTimeFormat()
@@ -875,8 +874,8 @@ class TestCqlshCopy(Tester):
             csv_values = list(csv.reader(csvfile))
 
         assert csv_values == [['1', '2015/01/01 07:00'],
-                               ['2', '2015/06/10 12:30'],
-                               ['3', '2015/12/31 23:59']]
+            ['2', '2015/06/10 12:30'],
+            ['3', '2015/12/31 23:59']]
 
         self.session.execute("TRUNCATE testdatetimeformat")
         cmds = "COPY ks.testdatetimeformat FROM 
'{name}'".format(name=tempfile.name)
@@ -890,7 +889,7 @@ class TestCqlshCopy(Tester):
 
         imported_results = list(self.session.execute("SELECT * FROM 
testdatetimeformat"))
         assert self.result_to_csv_rows(exported_results, cql_type_names, 
time_format=format) \
-               == self.result_to_csv_rows(imported_results, cql_type_names, 
time_format=format)
+            == self.result_to_csv_rows(imported_results, cql_type_names, 
time_format=format)
 
     @since('3.2')
     def test_reading_with_ttl(self):
@@ -1103,7 +1102,7 @@ class TestCqlshCopy(Tester):
 
         tempfile = self.get_temp_file()
         logger.debug('Exporting tokens {} - {} for {} records to csv file: 
{}'.format(begin_token, end_token,
-                                                                               
num_records, tempfile.name))
+                     num_records, tempfile.name))
         cmds = "COPY ks.testtokens TO '{}'".format(tempfile.name)
         if begin_token and end_token:
             cmds += "WITH BEGINTOKEN = '{}' AND ENDTOKEN = 
'{}'".format(begin_token, end_token)
@@ -1215,7 +1214,7 @@ class TestCqlshCopy(Tester):
             num_expected_rows = num_rows - chunk_size  # one chunk will fail
 
             logger.debug("Importing csv file {} with {} max insert errors and 
chunk size {}"
-                  .format(tempfile.name, max_insert_errors, chunk_size))
+                         .format(tempfile.name, max_insert_errors, chunk_size))
             # Note: we use one attempt because each attempt counts as a failure
             out, err, _ = self.run_cqlsh(cmds="COPY ks.testmaxinserterrors 
FROM '{}' WITH MAXINSERTERRORS='{}' "
                                          "AND CHUNKSIZE='{}' AND 
MAXATTEMPTS='1'"
@@ -1279,7 +1278,7 @@ class TestCqlshCopy(Tester):
             self.session.execute("TRUNCATE testparseerrors")
 
             logger.debug("Importing csv file {} with err_file {} and {}/{}/{}"
-                  .format(tempfile.name, err_file_name, num_chunks, 
chunk_size, num_failing_per_chunk))
+                         .format(tempfile.name, err_file_name, num_chunks, 
chunk_size, num_failing_per_chunk))
             cmd = "COPY ks.testparseerrors FROM '{}' WITH 
CHUNKSIZE={}".format(tempfile.name, chunk_size)
             if err_file:
                 cmd += " AND ERRFILE='{}'".format(err_file.name)
@@ -1418,7 +1417,7 @@ class TestCqlshCopy(Tester):
         def do_test(max_size, header):
             tempfile = self.get_temp_file(prefix='testwritemult', 
suffix='.csv')
             logger.debug('Exporting to csv file: {} with max size {} and 
header {}'
-                  .format(tempfile.name, max_size, header))
+                         .format(tempfile.name, max_size, header))
             cmd = "COPY {} TO '{}' WITH 
MAXOUTPUTSIZE='{}'".format(stress_table, tempfile.name, max_size)
             if header:
                 cmd += " AND HEADER='True'"
@@ -2000,7 +1999,7 @@ class TestCqlshCopy(Tester):
 
         def do_test(expected_vals, thousands_sep, decimal_sep):
             logger.debug('Exporting to csv file: {} with thousands_sep {} and 
decimal_sep {}'
-                  .format(tempfile.name, thousands_sep, decimal_sep))
+                         .format(tempfile.name, thousands_sep, decimal_sep))
             self.run_cqlsh(cmds="COPY ks.testnumberseps TO '{}' WITH 
THOUSANDSSEP='{}' AND DECIMALSEP='{}'"
                            .format(tempfile.name, thousands_sep, decimal_sep))
 
@@ -2009,7 +2008,7 @@ class TestCqlshCopy(Tester):
             assert expected_vals == list(csv_rows(tempfile.name))
 
             logger.debug('Importing from csv file: {} with thousands_sep {} 
and decimal_sep {}'
-                  .format(tempfile.name, thousands_sep, decimal_sep))
+                         .format(tempfile.name, thousands_sep, decimal_sep))
             self.session.execute('TRUNCATE ks.testnumberseps')
             self.run_cqlsh(cmds="COPY ks.testnumberseps FROM '{}' WITH 
THOUSANDSSEP='{}' AND DECIMALSEP='{}'"
                            .format(tempfile.name, thousands_sep, decimal_sep))
@@ -2024,7 +2023,7 @@ class TestCqlshCopy(Tester):
 
             # we format as if we were comparing to csv to overcome loss of 
precision in the import
             assert self.result_to_csv_rows(exported_results == cql_type_names,
-                             self.result_to_csv_rows(imported_results, 
cql_type_names))
+                                           
self.result_to_csv_rows(imported_results, cql_type_names))
 
         do_test(expected_vals_usual, ',', '.')
         do_test(expected_vals_inverted, '.', ',')
@@ -2058,8 +2057,8 @@ class TestCqlshCopy(Tester):
 
         csv_results = sorted(list(csv_rows(tempfile2.name)))
         assert [['1', '1943-06-19 11:21:01.000+0000'],
-                               ['2', '1943-06-19 11:21:01.123+0000'],
-                               ['3', '1943-06-19 11:21:01.124+0000']] == 
csv_results
+            ['2', '1943-06-19 11:21:01.123+0000'],
+            ['3', '1943-06-19 11:21:01.124+0000']] == csv_results
 
     @since('3.6')
     def test_round_trip_with_different_number_precision(self):
@@ -2145,7 +2144,7 @@ class TestCqlshCopy(Tester):
         logger.debug(out)
         assert 'Using {} child processes'.format(num_processes) in out
         assert [[num_records]] == rows_to_list(self.session.execute("SELECT 
COUNT(* FROM {}"
-                                                                            
.format(stress_table)))
+                                               .format(stress_table)))
 
     def test_round_trip_with_rate_file(self):
         """
@@ -2193,7 +2192,7 @@ class TestCqlshCopy(Tester):
 
         # check all records were imported
         assert [[num_rows]] == rows_to_list(self.session.execute("SELECT 
COUNT(* FROM {}"
-                                                                         
.format(stress_table)))
+                                            .format(stress_table)))
 
         check_rate_file()
 
@@ -2661,7 +2660,7 @@ class TestCqlshCopy(Tester):
         os.environ['CQLSH_COPY_TEST_FAILURES'] = json.dumps(failures)
 
         logger.debug('Exporting to csv file: {} with {} and 3 max attempts'
-              .format(tempfile.name, os.environ['CQLSH_COPY_TEST_FAILURES']))
+                     .format(tempfile.name, 
os.environ['CQLSH_COPY_TEST_FAILURES']))
         out, err, _ = self.run_cqlsh(cmds="COPY {} TO '{}' WITH 
MAXATTEMPTS='3'"
                                      .format(stress_table, tempfile.name))
         logger.debug(out)
@@ -2690,7 +2689,7 @@ class TestCqlshCopy(Tester):
         failures = {'failing_range': {'start': start, 'end': end, 
'num_failures': 3}}
         os.environ['CQLSH_COPY_TEST_FAILURES'] = json.dumps(failures)
         logger.debug('Exporting to csv file: {} with {} and 5 max attemps'
-              .format(tempfile.name, os.environ['CQLSH_COPY_TEST_FAILURES']))
+                     .format(tempfile.name, 
os.environ['CQLSH_COPY_TEST_FAILURES']))
         out, err, _ = self.run_cqlsh(cmds="COPY {} TO '{}' WITH 
MAXATTEMPTS='5'"
                                      .format(stress_table, tempfile.name))
         logger.debug(out)
@@ -2720,7 +2719,7 @@ class TestCqlshCopy(Tester):
         os.environ['CQLSH_COPY_TEST_FAILURES'] = json.dumps(failures)
 
         logger.debug('Exporting to csv file: {} with {}'
-              .format(tempfile.name, os.environ['CQLSH_COPY_TEST_FAILURES']))
+                     .format(tempfile.name, 
os.environ['CQLSH_COPY_TEST_FAILURES']))
         out, err, _ = self.run_cqlsh(cmds="COPY {} TO 
'{}'".format(stress_table, tempfile.name))
         logger.debug(out)
         logger.debug(err)

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/cqlsh_tests/cqlsh_tools.py
----------------------------------------------------------------------
diff --git a/cqlsh_tests/cqlsh_tools.py b/cqlsh_tests/cqlsh_tools.py
index 7175fb9..84d3b62 100644
--- a/cqlsh_tests/cqlsh_tools.py
+++ b/cqlsh_tests/cqlsh_tools.py
@@ -3,9 +3,6 @@ import random
 
 import cassandra
 
-from cassandra.cluster import ResultSet
-from typing import List
-
 
 class DummyColorMap(object):
 
@@ -116,5 +113,3 @@ def assert_resultset_contains(got: ResultSet, expected: 
List[tuple]) -> None:
             if row.a == t[0] and row.b == t[1]:
                 found = True
         assert found, 'Failed to find expected row: {}'.format(t)
-
-

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/cql_tests.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/cql_tests.py b/upgrade_tests/cql_tests.py
index f08a141..308ac9d 100644
--- a/upgrade_tests/cql_tests.py
+++ b/upgrade_tests/cql_tests.py
@@ -17,12 +17,16 @@ from cassandra.query import SimpleStatement
 from cassandra.util import sortedset
 
 from dtest import RUN_STATIC_UPGRADE_MATRIX
-from thrift_bindings.thrift010.ttypes import \
-    ConsistencyLevel as ThriftConsistencyLevel
-from thrift_bindings.thrift010.ttypes import (CfDef, Column, ColumnDef,
-                                        ColumnOrSuperColumn, ColumnParent,
-                                        Deletion, Mutation, SlicePredicate,
-                                        SliceRange)
+from thrift_bindings.thrift010.ttypes import CfDef
+from thrift_bindings.thrift010.ttypes import Column
+from thrift_bindings.thrift010.ttypes import ColumnDef
+from thrift_bindings.thrift010.ttypes import ColumnOrSuperColumn
+from thrift_bindings.thrift010.ttypes import ColumnParent
+from thrift_bindings.thrift010.ttypes import ConsistencyLevel as 
ThriftConsistencyLevel
+from thrift_bindings.thrift010.ttypes import Deletion
+from thrift_bindings.thrift010.ttypes import Mutation
+from thrift_bindings.thrift010.ttypes import SlicePredicate
+from thrift_bindings.thrift010.ttypes import SliceRange
 from thrift_test import get_thrift_client
 from tools.assertions import (assert_all, assert_invalid, assert_length_equal,
                               assert_none, assert_one, assert_row_count)

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/paging_test.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/paging_test.py b/upgrade_tests/paging_test.py
index e21ba88..3960b08 100644
--- a/upgrade_tests/paging_test.py
+++ b/upgrade_tests/paging_test.py
@@ -651,7 +651,7 @@ class TestPagingData(BasePagingTester, PageAssertionMixin):
             latest_version_with_bug = '2.2.3'
             if min_version <= latest_version_with_bug:
                 pytest.skip('known bug released in {latest_ver} and earlier 
(current min version {min_ver}); '
-                               
'skipping'.format(latest_ver=latest_version_with_bug, min_ver=min_version))
+                            
'skipping'.format(latest_ver=latest_version_with_bug, min_ver=min_version))
 
             logger.debug("Querying %s node" % ("upgraded" if is_upgraded else 
"old",))
             cursor.execute("TRUNCATE test")

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/storage_engine_upgrade_test.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/storage_engine_upgrade_test.py 
b/upgrade_tests/storage_engine_upgrade_test.py
index 4cc718c..1dfa402 100644
--- a/upgrade_tests/storage_engine_upgrade_test.py
+++ b/upgrade_tests/storage_engine_upgrade_test.py
@@ -5,9 +5,11 @@ import logging
 
 from dtest import Tester
 from sstable_generation_loading_test import TestBaseSStableLoader
-from thrift_bindings.thrift010.Cassandra import (ConsistencyLevel, Deletion,
-                                           Mutation, SlicePredicate,
-                                           SliceRange)
+from thrift_bindings.thrift010.Cassandra import ConsistencyLevel
+from thrift_bindings.thrift010.Cassandra import Deletion
+from thrift_bindings.thrift010.Cassandra import Mutation
+from thrift_bindings.thrift010.Cassandra import SlicePredicate
+from thrift_bindings.thrift010.Cassandra import SliceRange
 from thrift_test import composite, get_thrift_client, i32
 from tools.assertions import (assert_all, assert_length_equal, assert_none,
                               assert_one)

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/thrift_upgrade_test.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/thrift_upgrade_test.py 
b/upgrade_tests/thrift_upgrade_test.py
index 42343a2..a29e348 100644
--- a/upgrade_tests/thrift_upgrade_test.py
+++ b/upgrade_tests/thrift_upgrade_test.py
@@ -6,9 +6,12 @@ from cassandra.query import dict_factory
 
 from dtest import RUN_STATIC_UPGRADE_MATRIX, Tester
 from thrift_bindings.thrift010 import Cassandra
-from thrift_bindings.thrift010.Cassandra import (Column, ColumnDef,
-                                           ColumnParent, ConsistencyLevel,
-                                           SlicePredicate, SliceRange)
+from thrift_bindings.thrift010.Cassandra import Column
+from thrift_bindings.thrift010.Cassandra import ColumnDef
+from thrift_bindings.thrift010.Cassandra import ColumnParent
+from thrift_bindings.thrift010.Cassandra import ConsistencyLevel
+from thrift_bindings.thrift010.Cassandra import SlicePredicate
+from thrift_bindings.thrift010.Cassandra import SliceRange
 from thrift_test import _i64, get_thrift_client
 from tools.assertions import assert_length_equal
 from .upgrade_base import UpgradeTester
@@ -40,17 +43,17 @@ def _validate_sparse_cql(cursor, cf='sparse_super_1', 
column1='column1', col1='c
     cursor.execute('use ks')
 
     assert (list(cursor.execute("SELECT * FROM {}".format(cf))) ==
-                 [{key: 'k1', column1: 'key1', col1: 200, col2: 300},
-                  {key: 'k1', column1: 'key2', col1: 200, col2: 300},
-                  {key: 'k2', column1: 'key1', col1: 200, col2: 300},
-                  {key: 'k2', column1: 'key2', col1: 200, col2: 300}])
+            [{key: 'k1', column1: 'key1', col1: 200, col2: 300},
+            {key: 'k1', column1: 'key2', col1: 200, col2: 300},
+            {key: 'k2', column1: 'key1', col1: 200, col2: 300},
+            {key: 'k2', column1: 'key2', col1: 200, col2: 300}])
 
     assert (list(cursor.execute("SELECT * FROM {} WHERE {} = 'k1'".format(cf, 
key))) ==
-                 [{key: 'k1', column1: 'key1', col1: 200, col2: 300},
-                  {key: 'k1', column1: 'key2', col1: 200, col2: 300}])
+            [{key: 'k1', column1: 'key1', col1: 200, col2: 300},
+            {key: 'k1', column1: 'key2', col1: 200, col2: 300}])
 
     assert (list(cursor.execute("SELECT * FROM {} WHERE {} = 'k2' AND {} = 
'key1'".format(cf, key, column1))) ==
-                 [{key: 'k2', column1: 'key1', col1: 200, col2: 300}])
+            [{key: 'k2', column1: 'key1', col1: 200, col2: 300}])
 
 
 def _validate_sparse_thrift(client, cf='sparse_super_1'):
@@ -74,20 +77,20 @@ def _validate_dense_cql(cursor, cf='dense_super_1', 
key='key', column1='column1'
     cursor.execute('use ks')
 
     assert (list(cursor.execute("SELECT * FROM {}".format(cf))) ==
-                 [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'},
-                  {key: 'k1', column1: 'key2', column2: 100, value: 'value1'},
-                  {key: 'k2', column1: 'key1', column2: 200, value: 'value2'},
-                  {key: 'k2', column1: 'key2', column2: 200, value: 'value2'}])
+            [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'},
+            {key: 'k1', column1: 'key2', column2: 100, value: 'value1'},
+            {key: 'k2', column1: 'key1', column2: 200, value: 'value2'},
+            {key: 'k2', column1: 'key2', column2: 200, value: 'value2'}])
 
     assert (list(cursor.execute("SELECT * FROM {} WHERE {} = 'k1'".format(cf, 
key))) ==
-                 [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'},
-                  {key: 'k1', column1: 'key2', column2: 100, value: 'value1'}])
+            [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'},
+            {key: 'k1', column1: 'key2', column2: 100, value: 'value1'}])
 
     assert (list(cursor.execute("SELECT * FROM {} WHERE {} = 'k1' AND {} = 
'key1'".format(cf, key, column1))) ==
-                 [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'}])
+            [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'}])
 
     assert (list(cursor.execute("SELECT * FROM {} WHERE {} = 'k1' AND {} = 
'key1' AND {} = 100".format(cf, key, column1, column2))) ==
-                 [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'}])
+            [{key: 'k1', column1: 'key1', column2: 100, value: 'value1'}])
 
 
 def _validate_dense_thrift(client, cf='dense_super_1'):

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/upgrade_base.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/upgrade_base.py b/upgrade_tests/upgrade_base.py
index a403835..aee117a 100644
--- a/upgrade_tests/upgrade_base.py
+++ b/upgrade_tests/upgrade_base.py
@@ -55,7 +55,7 @@ class UpgradeTester(Tester, metaclass=ABCMeta):
                 'rejected from 
org.apache.cassandra.concurrent.DebuggableScheduledThreadPoolExecutor'
             )
             fixture_dtest_setup.ignore_log_patterns = 
fixture_dtest_setup.ignore_log_patterns \
-                                                      + 
[_known_teardown_race_error]
+                + [_known_teardown_race_error]
 
         fixture_dtest_setup.ignore_log_patterns = 
fixture_dtest_setup.ignore_log_patterns + [
             r'RejectedExecutionException.*ThreadPoolExecutor has shut down',  
# see  CASSANDRA-12364
@@ -64,7 +64,7 @@ class UpgradeTester(Tester, metaclass=ABCMeta):
     def setUp(self):
         self.validate_class_config()
         logger.debug("Upgrade test beginning, setting CASSANDRA_VERSION to {}, 
and jdk to {}. (Prior values will be restored after test)."
-              .format(self.UPGRADE_PATH.starting_version, 
self.UPGRADE_PATH.starting_meta.java_version))
+                     .format(self.UPGRADE_PATH.starting_version, 
self.UPGRADE_PATH.starting_meta.java_version))
         switch_jdks(self.UPGRADE_PATH.starting_meta.java_version)
         os.environ['CASSANDRA_VERSION'] = self.UPGRADE_PATH.starting_version
         super(UpgradeTester, self).setUp()

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/upgrade_compact_storage.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/upgrade_compact_storage.py 
b/upgrade_tests/upgrade_compact_storage.py
index ed85515..4c4a9f5 100644
--- a/upgrade_tests/upgrade_compact_storage.py
+++ b/upgrade_tests/upgrade_compact_storage.py
@@ -109,7 +109,7 @@ class TestUpgradeSuperColumnsThrough(Tester):
 
         session = self.patient_cql_connection(node, row_factory=dict_factory)
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE pk 
= 1")) ==
-                     [{'col2': 1, 'pk': 1, 'column1': None, 'value': None, 
'col1': 1}])
+                [{'col2': 1, 'pk': 1, 'column1': None, 'value': None, 'col1': 
1}])
 
     def test_force_readd_compact_storage(self):
         cluster = self.prepare(cassandra_version=VERSION_311)
@@ -130,7 +130,7 @@ class TestUpgradeSuperColumnsThrough(Tester):
         session.execute("update system_schema.tables set flags={} where 
keyspace_name='ks' and table_name='compact_table';")
 
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE pk 
= 1")) ==
-                     [{'col2': 1, 'pk': 1, 'column1': None, 'value': None, 
'col1': 1}])
+                [{'col2': 1, 'pk': 1, 'column1': None, 'value': None, 'col1': 
1}])
 
         self.fixture_dtest_setup.allow_log_errors = True
 
@@ -158,21 +158,21 @@ class TestUpgradeSuperColumnsThrough(Tester):
             session.execute("INSERT INTO ks.compact_table (pk, col1) VALUES 
('{pk}', '{col1}')".format(pk=i, col1=i * 10))
 
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE 
col1 = '50'")) ==
-                     [{'pk': '5', 'col1': '50'}])
+                [{'pk': '5', 'col1': '50'}])
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE pk 
= '5'")) ==
-                     [{'pk': '5', 'col1': '50'}])
+                [{'pk': '5', 'col1': '50'}])
         session.execute("ALTER TABLE ks.compact_table DROP COMPACT STORAGE")
 
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE 
col1 = '50'")) ==
-                     [{'col1': '50', 'column1': None, 'pk': '5', 'value': 
None}])
+                [{'col1': '50', 'column1': None, 'pk': '5', 'value': None}])
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE pk 
= '5'")) ==
-                     [{'col1': '50', 'column1': None, 'pk': '5', 'value': 
None}])
+                [{'col1': '50', 'column1': None, 'pk': '5', 'value': None}])
 
         self.upgrade_to_version(VERSION_TRUNK, wait=True)
 
         session = self.patient_cql_connection(node, row_factory=dict_factory)
 
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE 
col1 = '50'")) ==
-                     [{'col1': '50', 'column1': None, 'pk': '5', 'value': 
None}])
+                [{'col1': '50', 'column1': None, 'pk': '5', 'value': None}])
         assert (list(session.execute("SELECT * FROM ks.compact_table WHERE pk 
= '5'")) ==
-                     [{'col1': '50', 'column1': None, 'pk': '5', 'value': 
None}])
+                [{'col1': '50', 'column1': None, 'pk': '5', 'value': None}])

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/upgrade_supercolumns_test.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/upgrade_supercolumns_test.py 
b/upgrade_tests/upgrade_supercolumns_test.py
index e216f5c..ea54f90 100644
--- a/upgrade_tests/upgrade_supercolumns_test.py
+++ b/upgrade_tests/upgrade_supercolumns_test.py
@@ -6,18 +6,10 @@ from dtest import Tester
 from thrift_test import get_thrift_client
 from tools.assertions import assert_all
 
-from thrift_bindings.thrift010.Cassandra import (CfDef, Column, ColumnDef,
-                                           ColumnOrSuperColumn, ColumnParent,
-                                           ColumnPath, ColumnSlice,
-                                           ConsistencyLevel, CounterColumn,
-                                           Deletion, IndexExpression,
-                                           IndexOperator, IndexType,
-                                           InvalidRequestException, KeyRange,
-                                           KeySlice, KsDef, MultiSliceRequest,
-                                           Mutation, NotFoundException,
-                                           SlicePredicate, SliceRange,
-                                           SuperColumn)
-
+from thrift_bindings.thrift010.Cassandra import ColumnParent
+from thrift_bindings.thrift010.Cassandra import ConsistencyLevel
+from thrift_bindings.thrift010.Cassandra import SlicePredicate
+from thrift_bindings.thrift010.Cassandra import SliceRange
 logger = logging.getLogger(__name__)
 
 # Use static supercolumn data to reduce total test time and avoid driver 
issues connecting to C* 1.2.

http://git-wip-us.apache.org/repos/asf/cassandra-dtest/blob/557d0123/upgrade_tests/upgrade_through_versions_test.py
----------------------------------------------------------------------
diff --git a/upgrade_tests/upgrade_through_versions_test.py 
b/upgrade_tests/upgrade_through_versions_test.py
index 397ea15..3c6872f 100644
--- a/upgrade_tests/upgrade_through_versions_test.py
+++ b/upgrade_tests/upgrade_through_versions_test.py
@@ -19,9 +19,13 @@ from cassandra.query import SimpleStatement
 from dtest import RUN_STATIC_UPGRADE_MATRIX, Tester
 from tools.misc import generate_ssl_stores, new_node
 from .upgrade_base import switch_jdks
-from .upgrade_manifest import (build_upgrade_pairs, current_2_0_x,
-                              current_2_1_x, current_2_2_x, current_3_0_x,
-                              indev_2_2_x, indev_3_x)
+from upgrade_manifest import build_upgrade_pairs
+from upgrade_manifest import current_2_0_x
+from upgrade_manifest import current_2_1_x
+from upgrade_manifest import current_2_2_x
+from upgrade_manifest import current_3_0_x
+from upgrade_manifest import indev_2_2_x
+from upgrade_manifest import indev_3_x
 
 logger = logging.getLogger(__name__)
 
@@ -249,7 +253,7 @@ class TestUpgrade(Tester):
 
     def setUp(self):
         logger.debug("Upgrade test beginning, setting CASSANDRA_VERSION to {}, 
and jdk to {}. (Prior values will be restored after test)."
-              .format(self.test_version_metas[0].version, 
self.test_version_metas[0].java_version))
+                     .format(self.test_version_metas[0].version, 
self.test_version_metas[0].java_version))
         os.environ['CASSANDRA_VERSION'] = self.test_version_metas[0].version
         switch_jdks(self.test_version_metas[0].java_version)
 
@@ -347,7 +351,7 @@ class TestUpgrade(Tester):
 
                     self._check_on_subprocs(self.fixture_dtest_setup.subprocs)
                     logger.debug('Successfully upgraded %d of %d nodes to %s' %
-                          (num + 1, len(self.cluster.nodelist()), 
version_meta.version))
+                                 (num + 1, len(self.cluster.nodelist()), 
version_meta.version))
 
                 self.cluster.set_install_dir(version=version_meta.version)
 
@@ -775,11 +779,9 @@ def create_upgrade_class(clsname, version_metas, 
protocol_version,
     upgrade_applies_to_env = RUN_STATIC_UPGRADE_MATRIX or 
version_metas[-1].matches_current_env_version_family
     if not upgrade_applies_to_env:
         pytest.mark.skip(reason='test not applicable to env.')
-    newcls = type(
-            clsname,
-            parent_classes,
-            {'test_version_metas': version_metas, '__test__': True, 
'protocol_version': protocol_version, 'extra_config': extra_config}
-        )
+    newcls = type(clsname,
+                  parent_classes,
+                  {'test_version_metas': version_metas, '__test__': True, 
'protocol_version': protocol_version, 'extra_config': extra_config})
 
     if clsname in globals():
         raise RuntimeError("Class by name already exists!")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@cassandra.apache.org
For additional commands, e-mail: commits-h...@cassandra.apache.org

Reply via email to