Title: [91491] trunk/Tools
Revision: 91491
Author: [email protected]
Date: 2011-07-21 12:45:46 -0700 (Thu, 21 Jul 2011)

Log Message

cleanup jsonresults.py style in preparation for making some changes
https://bugs.webkit.org/show_bug.cgi?id=64968

Reviewed by Adam Barth.

No code changes. Unittest still passes. Mostly just removing
useless comments and 80 character wrapping.

* TestResultServer/model/jsonresults.py:
* TestResultServer/model/jsonresults_unittest.py:

Modified Paths

trunk/Tools/ChangeLog
trunk/Tools/TestResultServer/model/jsonresults.py
trunk/Tools/TestResultServer/model/jsonresults_unittest.py

Diff

Modified: trunk/Tools/ChangeLog (91490 => 91491)


--- trunk/Tools/ChangeLog	2011-07-21 19:30:32 UTC (rev 91490)
+++ trunk/Tools/ChangeLog	2011-07-21 19:45:46 UTC (rev 91491)
@@ -1,3 +1,16 @@
+2011-07-21  Ojan Vafai  <[email protected]>
+
+        cleanup jsonresults.py style in preparation for making some changes
+        https://bugs.webkit.org/show_bug.cgi?id=64968
+
+        Reviewed by Adam Barth.
+
+        No code changes. Unittest still passes. Mostly just removing
+        useless comments and 80 character wrapping.
+
+        * TestResultServer/model/jsonresults.py:
+        * TestResultServer/model/jsonresults_unittest.py:
+
 2011-07-21  Adam Barth  <[email protected]>
 
         Add missing column header.

Modified: trunk/Tools/TestResultServer/model/jsonresults.py (91490 => 91491)


--- trunk/Tools/TestResultServer/model/jsonresults.py	2011-07-21 19:30:32 UTC (rev 91490)
+++ trunk/Tools/TestResultServer/model/jsonresults.py	2011-07-21 19:45:46 UTC (rev 91491)
@@ -53,49 +53,18 @@
 class JsonResults(object):
     @classmethod
     def _strip_prefix_suffix(cls, data):
-        """Strip out prefix and suffix of json results string.
-
-        Args:
-            data: json file content.
-
-        Returns:
-            json string without prefix and suffix.
-        """
-
         assert(data.startswith(JSON_RESULTS_PREFIX))
         assert(data.endswith(JSON_RESULTS_SUFFIX))
 
-        return data[len(JSON_RESULTS_PREFIX):
-                    len(data) - len(JSON_RESULTS_SUFFIX)]
+        return data[len(JSON_RESULTS_PREFIX):len(data) - len(JSON_RESULTS_SUFFIX)]
 
     @classmethod
     def _generate_file_data(cls, json, sort_keys=False):
-        """Given json string, generate file content data by adding
-           prefix and suffix.
-
-        Args:
-            json: json string without prefix and suffix.
-
-        Returns:
-            json file data.
-        """
-
-        data = "" separators=(',', ':'),
-            sort_keys=sort_keys)
+        data = "" separators=(',', ':'), sort_keys=sort_keys)
         return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX
 
     @classmethod
     def _load_json(cls, file_data):
-        """Load json file to a python object.
-
-        Args:
-            file_data: json file content.
-
-        Returns:
-            json object or
-            None on failure.
-        """
-
         json_results_str = cls._strip_prefix_suffix(file_data)
         if not json_results_str:
             logging.warning("No json results data.")
@@ -110,24 +79,9 @@
 
     @classmethod
     def _merge_json(cls, aggregated_json, incremental_json, num_runs):
-        """Merge incremental json into aggregated json results.
-
-        Args:
-            aggregated_json: aggregated json object.
-            incremental_json: incremental json object.
-            num_runs: number of total runs to include.
-
-        Returns:
-            True if merge succeeds or
-            False on failure.
-        """
-
-        # Merge non tests property data.
-        # Tests properties are merged in _merge_tests.
         if not cls._merge_non_test_data(aggregated_json, incremental_json, num_runs):
             return False
 
-        # Merge tests results and times
         incremental_tests = incremental_json[JSON_RESULTS_TESTS]
         if incremental_tests:
             aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
@@ -137,44 +91,25 @@
 
     @classmethod
     def _merge_non_test_data(cls, aggregated_json, incremental_json, num_runs):
-        """Merge incremental non tests property data into aggregated json results.
-
-        Args:
-            aggregated_json: aggregated json object.
-            incremental_json: incremental json object.
-            num_runs: number of total runs to include.
-
-        Returns:
-            True if merge succeeds or
-            False on failure.
-        """
-
         incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
         aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
         aggregated_build_number = int(aggregated_builds[0])
-        # Loop through all incremental builds, start from the oldest run.
+
         for index in reversed(range(len(incremental_builds))):
             build_number = int(incremental_builds[index])
-            logging.debug("Merging build %s, incremental json index: %d.",
-                build_number, index)
+            logging.debug("Merging build %s, incremental json index: %d.", build_number, index)
 
-            # Return if not all build numbers in the incremental json results
-            # are newer than the most recent build in the aggregated results.
             # FIXME: make this case work.
             if build_number < aggregated_build_number:
-                logging.warning(("Build %d in incremental json is older than "
-                    "the most recent build in aggregated results: %d"),
+                logging.warning("Build %d in incremental json is older than the most recent build in aggregated results: %d",
                     build_number, aggregated_build_number)
                 return False
 
-            # Return if the build number is duplicated.
             # FIXME: skip the duplicated build and merge rest of the results.
-            #        Need to be careful on skiping the corresponding value in
-            #        _merge_tests because the property data for each test could
-            #        be accumulated.
+            # Need to be careful on skiping the corresponding value in
+            # _merge_tests because the property data for each test could be accumulated.
             if build_number == aggregated_build_number:
-                logging.warning("Duplicate build %d in incremental json",
-                    build_number)
+                logging.warning("Duplicate build %d in incremental json", build_number)
                 return False
 
             # Merge this build into aggreagated results.
@@ -183,17 +118,7 @@
         return True
 
     @classmethod
-    def _merge_one_build(cls, aggregated_json, incremental_json,
-                         incremental_index, num_runs):
-        """Merge one build of incremental json into aggregated json results.
-
-        Args:
-            aggregated_json: aggregated json object.
-            incremental_json: incremental json object.
-            incremental_index: index of the incremental json results to merge.
-            num_runs: number of total runs to include.
-        """
-
+    def _merge_one_build(cls, aggregated_json, incremental_json, incremental_index, num_runs):
         for key in incremental_json.keys():
             # Merge json results except "tests" properties (results, times etc).
             # "tests" properties will be handled separately.
@@ -201,25 +126,14 @@
                 continue
 
             if key in aggregated_json:
-                aggregated_json[key].insert(
-                    0, incremental_json[key][incremental_index])
-                aggregated_json[key] = \
-                    aggregated_json[key][:num_runs]
+                aggregated_json[key].insert(0, incremental_json[key][incremental_index])
+                aggregated_json[key] = aggregated_json[key][:num_runs]
             else:
                 aggregated_json[key] = incremental_json[key]
 
     @classmethod
     def _merge_tests(cls, aggregated_json, incremental_json, num_runs):
-        """Merge "tests" properties:results, times.
-
-        Args:
-            aggregated_json: aggregated json object.
-            incremental_json: incremental json object.
-            num_runs: number of total runs to include.
-        """
-
-        all_tests = (set(aggregated_json.iterkeys()) |
-                     set(incremental_json.iterkeys()))
+        all_tests = set(aggregated_json.iterkeys()) | set(incremental_json.iterkeys())
         for test_name in all_tests:
             if test_name in aggregated_json:
                 aggregated_test = aggregated_json[test_name]
@@ -231,76 +145,35 @@
                     results = [[1, JSON_RESULTS_NO_DATA]]
                     times = [[1, 0]]
 
-                cls._insert_item_run_length_encoded(
-                    results, aggregated_test[JSON_RESULTS_RESULTS], num_runs)
-                cls._insert_item_run_length_encoded(
-                    times, aggregated_test[JSON_RESULTS_TIMES], num_runs)
+                cls._insert_item_run_length_encoded(results, aggregated_test[JSON_RESULTS_RESULTS], num_runs)
+                cls._insert_item_run_length_encoded(times, aggregated_test[JSON_RESULTS_TIMES], num_runs)
                 cls._normalize_results_json(test_name, aggregated_json, num_runs)
             else:
                 aggregated_json[test_name] = incremental_json[test_name]
 
     @classmethod
     def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item, num_runs):
-        """Inserts the incremental run-length encoded results into the aggregated
-           run-length encoded results.
-
-        Args:
-            incremental_item: incremental run-length encoded results.
-            aggregated_item: aggregated run-length encoded results.
-            num_runs: number of total runs to include.
-        """
-
         for item in incremental_item:
             if len(aggregated_item) and item[1] == aggregated_item[0][1]:
-                aggregated_item[0][0] = min(
-                    aggregated_item[0][0] + item[0], num_runs)
+                aggregated_item[0][0] = min(aggregated_item[0][0] + item[0], num_runs)
             else:
                 aggregated_item.insert(0, item)
 
     @classmethod
     def _normalize_results_json(cls, test_name, aggregated_json, num_runs):
-        """ Prune tests where all runs pass or tests that no longer exist and
-        truncate all results to num_runs.
-
-        Args:
-          test_name: Name of the test.
-          aggregated_json: The JSON object with all the test results for
-                           this builder.
-          num_runs: number of total runs to include.
-        """
-
         aggregated_test = aggregated_json[test_name]
-        aggregated_test[JSON_RESULTS_RESULTS] = \
-            cls._remove_items_over_max_number_of_builds(
-                aggregated_test[JSON_RESULTS_RESULTS], num_runs)
-        aggregated_test[JSON_RESULTS_TIMES] = \
-            cls._remove_items_over_max_number_of_builds(
-                aggregated_test[JSON_RESULTS_TIMES], num_runs)
+        aggregated_test[JSON_RESULTS_RESULTS] = cls._remove_items_over_max_number_of_builds(aggregated_test[JSON_RESULTS_RESULTS], num_runs)
+        aggregated_test[JSON_RESULTS_TIMES] = cls._remove_items_over_max_number_of_builds(aggregated_test[JSON_RESULTS_TIMES], num_runs)
 
-        is_all_pass = cls._is_results_all_of_type(
-            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
-        is_all_no_data = cls._is_results_all_of_type(
-            aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)
+        is_all_pass = cls._is_results_all_of_type(aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_PASS)
+        is_all_no_data = cls._is_results_all_of_type(aggregated_test[JSON_RESULTS_RESULTS], JSON_RESULTS_NO_DATA)
 
-        max_time = max(
-            [time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
-        # Remove all passes/no-data from the results to reduce noise and
-        # filesize. If a test passes every run, but
-        # takes >= JSON_RESULTS_MIN_TIME to run, don't throw away the data.
-        if (is_all_no_data or
-           (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
+        max_time = max([time[1] for time in aggregated_test[JSON_RESULTS_TIMES]])
+        if (is_all_no_data or (is_all_pass and max_time < JSON_RESULTS_MIN_TIME)):
             del aggregated_json[test_name]
 
     @classmethod
     def _remove_items_over_max_number_of_builds(cls, encoded_list, num_runs):
-        """Removes items from the run-length encoded list after the final
-        item that exceeds the max number of builds to track.
-
-        Args:
-          encoded_results: run-length encoded results. An array of arrays, e.g.
-              [[3,'A'],[1,'Q']] encodes AAAQ.
-          num_runs: number of total runs to include.
-        """
         num_builds = 0
         index = 0
         for result in encoded_list:
@@ -313,22 +186,11 @@
 
     @classmethod
     def _is_results_all_of_type(cls, results, type):
-        """Returns whether all the results are of the given type
-        (e.g. all passes).
-        """
-
         return len(results) == 1 and results[0][1] == type
 
     @classmethod
     def _flatten_json_tests(cls, json, prefix=None):
-        """Flattens a trie directory structure in tests into a flat structure.
-
-        Args:
-            json: json tests structure.
-            prefix: aleady-computed path to append to the eventual test name, if any.
-
-        Returns:
-            The flattened json tests structure.
+        """Flattens a trie directory structure of tests into a flat structure.
         """
         result = {}
         for name, test in json.iteritems():
@@ -346,18 +208,6 @@
 
     @classmethod
     def _check_json(cls, builder, json):
-        """Check whether the given json is valid.
-        Converts partially-supported json to supported version json.
-
-        Args:
-            builder: builder name this json is for.
-            json: json object to check and convert if necessary.
-
-        Returns:
-            True if the json is valid or
-            False otherwise.
-        """
-
         version = json[JSON_RESULTS_VERSION_KEY]
         if version > JSON_RESULTS_HIERARCHICAL_VERSION:
             logging.error("Results JSON version '%s' is not supported.",
@@ -385,19 +235,6 @@
 
     @classmethod
     def merge(cls, builder, aggregated, incremental, num_runs, sort_keys=False):
-        """Merge incremental json file data with aggregated json file data.
-
-        Args:
-            builder: builder name.
-            aggregated: aggregated json file data.
-            incremental: incremental json file data.
-            sort_key: whether or not to sort key when dumping json results.
-
-        Returns:
-            Merged json file data if merge succeeds or
-            None on failure.
-        """
-
         if not incremental:
             logging.warning("Nothing to merge.")
             return None
@@ -434,20 +271,6 @@
 
     @classmethod
     def update(cls, master, builder, test_type, incremental):
-        """Update datastore json file data by merging it with incremental json
-           file. Writes the large file and a small file. The small file just stores
-           fewer runs.
-
-        Args:
-            master: master name.
-            builder: builder name.
-            test_type: type of test results.
-            incremental: incremental json file data to merge.
-
-        Returns:
-            Large TestFile object if update succeeds or
-            None on failure.
-        """
         small_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE_SMALL, JSON_RESULTS_MAX_BUILDS_SMALL)
         large_file_updated = cls.update_file(master, builder, test_type, incremental, JSON_RESULTS_FILE, JSON_RESULTS_MAX_BUILDS)
 
@@ -470,27 +293,13 @@
             logging.info("No existing json results, incremental json is saved.")
 
         if not new_results or not file.save(new_results):
-            logging.info(
-                "Update failed, master: %s, builder: %s, test_type: %s, name: %s." %
-                (master, builder, test_type, filename))
+            logging.info("Update failed, master: %s, builder: %s, test_type: %s, name: %s." % (master, builder, test_type, filename))
             return False
 
         return True
 
     @classmethod
     def get_test_list(cls, builder, json_file_data):
-        """Get list of test names from aggregated json file data.
-
-        Args:
-            json_file_data: json file data that has all test-data and
-                            non-test-data.
-
-        Returns:
-            json file with test name list only. The json format is the same
-            as the one saved in datastore, but all non-test-data and test detail
-            results are removed.
-        """
-
         logging.debug("Loading test results json...")
         json = cls._load_json(json_file_data)
         if not json:
@@ -502,7 +311,6 @@
 
         test_list_json = {}
         tests = json[builder][JSON_RESULTS_TESTS]
-        test_list_json[builder] = {
-            "tests": dict.fromkeys(tests, {})}
+        test_list_json[builder] = {"tests": dict.fromkeys(tests, {})}
 
         return cls._generate_file_data(test_list_json)

Modified: trunk/Tools/TestResultServer/model/jsonresults_unittest.py (91490 => 91491)


--- trunk/Tools/TestResultServer/model/jsonresults_unittest.py	2011-07-21 19:30:32 UTC (rev 91490)
+++ trunk/Tools/TestResultServer/model/jsonresults_unittest.py	2011-07-21 19:45:46 UTC (rev 91491)
@@ -141,9 +141,7 @@
     def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
         aggregated_results = self._make_test_json(aggregated_data)
         incremental_results = self._make_test_json(incremental_data)
-        merged_results = JsonResults.merge(self._builder,
-            aggregated_results, incremental_results, max_builds,
-            sort_keys=True)
+        merged_results = JsonResults.merge(self._builder, aggregated_results, incremental_results, max_builds, sort_keys=True)
 
         if expected_data:
             expected_results = self._make_test_json(expected_data)
@@ -158,10 +156,9 @@
         for test in expected_data:
             json_tests.append("\"" + test + "\":{}")
 
-        expected_results = JSON_RESULTS_PREFIX + \
-            JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
-                "[TESTDATA_TESTS]", ",".join(json_tests)) + \
-            JSON_RESULTS_SUFFIX
+        expected_results = (JSON_RESULTS_PREFIX +
+            JSON_RESULTS_TEST_LIST_TEMPLATE.replace("[TESTDATA_TESTS]", ",".join(json_tests)) +
+            JSON_RESULTS_SUFFIX)
 
         actual_results = JsonResults.get_test_list(self._builder, input_results)
         self.assertEquals(actual_results, expected_results)
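
The expected_results string assembled above mirrors the JSONP-style wrapping that _generate_file_data and _strip_prefix_suffix implement. Below is a self-contained sketch under assumed prefix/suffix values (illustrative placeholders, not necessarily the server's exact constants); the compact separators match what the patch uses when dumping.

    # Standalone sketch of the prefix/suffix wrapping of results files.
    import json

    JSON_RESULTS_PREFIX = "ADD_RESULTS("   # assumed wrapper, for illustration
    JSON_RESULTS_SUFFIX = ");"

    def generate_file_data(payload, sort_keys=False):
        # Compact separators keep the stored file small.
        data = json.dumps(payload, separators=(",", ":"), sort_keys=sort_keys)
        return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX

    def strip_prefix_suffix(file_data):
        assert file_data.startswith(JSON_RESULTS_PREFIX)
        assert file_data.endswith(JSON_RESULTS_SUFFIX)
        return file_data[len(JSON_RESULTS_PREFIX):len(file_data) - len(JSON_RESULTS_SUFFIX)]

    if __name__ == "__main__":
        wrapped = generate_file_data({"tests": {"fast/dom/test.html": {}}}, sort_keys=True)
        assert json.loads(strip_prefix_suffix(wrapped)) == {"tests": {"fast/dom/test.html": {}}}
        print(wrapped)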