Title: [248979] trunk/Tools
Revision: 248979
Author: jbed...@apple.com
Date: 2019-08-21 17:21:50 -0700 (Wed, 21 Aug 2019)

Log Message

results.webkit.org: Add database table to save zip archives to
https://bugs.webkit.org/show_bug.cgi?id=200718

Rubber-stamped by Aakash Jain.

* resultsdbpy/resultsdbpy/model/archive_context.py: Added.
(ArchiveContext):
(ArchiveContext.ArchivesByCommit): Store archives to be retrieved by commit and configuration.
(ArchiveContext.ArchivesByCommit.unpack):
(ArchiveContext.assert_zipfile):
(ArchiveContext.open_zipfile):
(ArchiveContext.__init__):
(ArchiveContext.__enter__):
(ArchiveContext.__exit__):
(ArchiveContext.register): Save the provided archive to Cassandra.
(ArchiveContext.find_archive): Find all archives matching the specified criteria.
(ArchiveContext._files_for_archive):
(ArchiveContext.ls): List all files in the matching archives.
(ArchiveContext.file): Extract a file from the matching archives.
* resultsdbpy/resultsdbpy/model/archive_context_unittest.py: Added.
(ArchiveContextTest):
* resultsdbpy/resultsdbpy/model/mock_model_factory.py:
(MockModelFactory):
(MockModelFactory.process_results):
(MockModelFactory.add_mock_archives):
* resultsdbpy/resultsdbpy/model/model.py:
(Model.__init__): Add an ArchiveContext to the model.
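
For reference, the end-to-end flow of the new API looks roughly like the sketch
below. It reuses the mock Redis/Cassandra backends, mock repository, and mock
archive data exercised by the unit tests in this patch; only the keyspace name
is illustrative.

    from fakeredis import FakeStrictRedis
    from resultsdbpy.controller.configuration import Configuration
    from resultsdbpy.model.mock_cassandra_context import MockCassandraContext
    from resultsdbpy.model.mock_model_factory import MockModelFactory
    from resultsdbpy.model.mock_repository import MockSVNRepository

    # Stand up an in-memory model and populate it with the factory's mock archives.
    model = MockModelFactory.create(
        redis=FakeStrictRedis(),
        cassandra=MockCassandraContext(keyspace='archive_example_keyspace', create_keyspace=True),
    )
    MockModelFactory.add_mock_archives(model)

    # Query the stored archives for a single commit and configuration.
    commit = MockSVNRepository.webkit().commit_for_id(236542)
    kwargs = dict(
        configurations=[Configuration(platform='Mac', style='Release', flavor='wk1')],
        suite='layout-tests', begin=commit, end=commit,
    )
    archives = model.archive_context.find_archive(**kwargs)  # {config: [{'uuid', 'start_time', 'archive'}]}
    listing = model.archive_context.ls(**kwargs)              # {config: [{'uuid', 'start_time', 'files'}]}
    content = model.archive_context.file(path='file.txt', **kwargs)  # {config: [{'uuid', 'start_time', 'file'}]}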

Modified Paths

trunk/Tools/ChangeLog
trunk/Tools/resultsdbpy/resultsdbpy/model/mock_model_factory.py
trunk/Tools/resultsdbpy/resultsdbpy/model/model.py

Added Paths

trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context.py
trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context_unittest.py

Diff

Modified: trunk/Tools/ChangeLog (248978 => 248979)


--- trunk/Tools/ChangeLog	2019-08-22 00:11:28 UTC (rev 248978)
+++ trunk/Tools/ChangeLog	2019-08-22 00:21:50 UTC (rev 248979)
@@ -1,3 +1,33 @@
+2019-08-21  Jonathan Bedard  <jbed...@apple.com>
+
+        results.webkit.org: Add database table to save zip archives to
+        https://bugs.webkit.org/show_bug.cgi?id=200718
+
+        Rubber-stamped by Aakash Jain.
+
+        * resultsdbpy/resultsdbpy/model/archive_context.py: Added.
+        (ArchiveContext):
+        (ArchiveContext.ArchivesByCommit): Store archives to be retrieved by commit and configuration.
+        (ArchiveContext.ArchivesByCommit.unpack):
+        (ArchiveContext.assert_zipfile):
+        (ArchiveContext.open_zipfile):
+        (ArchiveContext.__init__):
+        (ArchiveContext.__enter__):
+        (ArchiveContext.__exit__):
+        (ArchiveContext.register): Save the provided archive to Cassandra.
+        (ArchiveContext.find_archive): Find all archives matching the specified criteria.
+        (ArchiveContext._files_for_archive):
+        (ArchiveContext.ls): List all files in the matching archives.
+        (ArchiveContext.file): Extract a file from the matching archives.
+        * resultsdbpy/resultsdbpy/model/archive_context_unittest.py: Added.
+        (ArchiveContextTest):
+        * resultsdbpy/resultsdbpy/model/mock_model_factory.py:
+        (MockModelFactory):
+        (MockModelFactory.process_results):
+        (MockModelFactory.add_mock_archives):
+        * resultsdbpy/resultsdbpy/model/model.py:
+        (Model.__init__): Add an ArchiveContext to the model.
+
 2019-08-21  Aakash Jain  <aakash_j...@apple.com>
 
         [ews-build] view layout test results option should be displayed next to layout-test build step

Added: trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context.py (0 => 248979)


--- trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context.py	                        (rev 0)
+++ trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context.py	2019-08-22 00:21:50 UTC (rev 248979)
@@ -0,0 +1,193 @@
+# Copyright (C) 2019 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1.  Redistributions of source code must retain the above copyright
+#     notice, this list of conditions and the following disclaimer.
+# 2.  Redistributions in binary form must reproduce the above copyright
+#     notice, this list of conditions and the following disclaimer in the
+#     documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import calendar
+import io
+import time
+import zipfile
+
+from cassandra.cqlengine import columns
+from datetime import datetime
+from resultsdbpy.controller.commit import Commit
+from resultsdbpy.model.commit_context import CommitContext
+from resultsdbpy.model.configuration_context import ClusteredByConfiguration
+from resultsdbpy.model.upload_context import UploadContext
+
+
+def _get_time(input_time):
+    if isinstance(input_time, datetime):
+        return calendar.timegm(input_time.timetuple())
+    if input_time:
+        return int(input_time)
+    return None
+
+
+class ArchiveContext(object):
+    DEFAULT_LIMIT = 10
+
+    class ArchivesByCommit(ClusteredByConfiguration):
+        __table_name__ = 'archives_by_commit'
+        suite = columns.Text(partition_key=True, required=True)
+        branch = columns.Text(partition_key=True, required=True)
+        uuid = columns.BigInt(primary_key=True, required=True, clustering_order='DESC')
+        sdk = columns.Text(primary_key=True, required=True)
+        start_time = columns.BigInt(primary_key=True, required=True)
+        archive = columns.Blob(required=True)
+
+        def unpack(self):
+            return dict(
+                uuid=self.uuid,
+                start_time=self.start_time,
+                archive=io.BytesIO(self.archive),
+            )
+
+    @classmethod
+    def assert_zipfile(cls, archive):
+        if not isinstance(archive, io.BytesIO):
+            raise TypeError(f'Archive expected {io.BytesIO}, got {type(archive)} instead')
+        if not zipfile.is_zipfile(archive):
+            raise TypeError('Archive is not a zipfile')
+
+    @classmethod
+    def open_zipfile(cls, archive):
+        cls.assert_zipfile(archive)
+        return zipfile.ZipFile(archive, mode='r')
+
+    def __init__(self, configuration_context, commit_context, ttl_seconds=None):
+        self.configuration_context = configuration_context
+        self.commit_context = commit_context
+        self.cassandra = self.configuration_context.cassandra
+        self.ttl_seconds = ttl_seconds
+
+        with self:
+            self.cassandra.create_table(self.ArchivesByCommit)
+            self.cassandra.create_table(UploadContext.SuitesByConfiguration)
+
+    def __enter__(self):
+        self.configuration_context.__enter__()
+        self.commit_context.__enter__()
+
+    def __exit__(self, *args, **kwargs):
+        self.commit_context.__exit__(*args, **kwargs)
+        self.configuration_context.__exit__(*args, **kwargs)
+
+    def register(self, archive, configuration, commits, suite, timestamp=None):
+        self.assert_zipfile(archive)
+        timestamp = _get_time(timestamp) or time.time()
+
+        with self:
+            uuid = self.commit_context.uuid_for_commits(commits)
+            ttl = int((uuid // Commit.TIMESTAMP_TO_UUID_MULTIPLIER) + self.ttl_seconds - time.time()) if self.ttl_seconds else None
+
+            self.configuration_context.register_configuration(configuration, timestamp=timestamp)
+
+            for branch in self.commit_context.branch_keys_for_commits(commits):
+                self.configuration_context.insert_row_with_configuration(
+                    UploadContext.SuitesByConfiguration.__table_name__, configuration, suite=suite, ttl=ttl,
+                )
+                self.configuration_context.insert_row_with_configuration(
+                    self.ArchivesByCommit.__table_name__, configuration=configuration, suite=suite,
+                    branch=branch, uuid=uuid, ttl=ttl,
+                    archive=archive.getvalue(), sdk=configuration.sdk or '?', start_time=timestamp,
+                )
+
+    def find_archive(
+            self, configurations=None, suite=None, recent=True,
+            branch=None, begin=None, end=None,
+            begin_query_time=None, end_query_time=None,
+            limit=DEFAULT_LIMIT,
+    ):
+        if not configurations:
+            configurations = []
+        if not isinstance(suite, str):
+            raise TypeError(f'Expected type {str}, got {type(suite)}')
+
+        with self:
+            result = {}
+            for configuration in configurations:
+                result.update({config: [value.unpack() for value in values] for config, values in self.configuration_context.select_from_table_with_configurations(
+                    self.ArchivesByCommit.__table_name__, configurations=[configuration], recent=recent,
+                    suite=suite, sdk=configuration.sdk, branch=branch or self.commit_context.DEFAULT_BRANCH_KEY,
+                    uuid__gte=CommitContext.convert_to_uuid(begin),
+                    uuid__lte=CommitContext.convert_to_uuid(end, CommitContext.timestamp_to_uuid()),
+                    start_time__gte=_get_time(begin_query_time), start_time__lte=_get_time(end_query_time),
+                    limit=limit,
+                ).items()})
+            return result
+
+    @classmethod
+    def _files_for_archive(cls, archive):
+        master = None
+        files = set()
+        for item in archive.namelist():
+            if item.startswith('__'):
+                continue
+
+            if not master:
+                master = item
+                continue
+
+            files.add(item[len(master):])
+        return master, sorted([*files])
+
+    def ls(self, *args, **kwargs):
+        candidates = self.find_archive(*args, **kwargs)
+        result = {}
+        for config, archives in candidates.items():
+            result[config] = []
+            for archive in archives:
+                result[config].append(dict(
+                    uuid=archive['uuid'],
+                    start_time=archive['start_time'],
+                    files=set(),
+                ))
+
+                with self.open_zipfile(archive['archive']) as unpacked:
+                    _, result[config][-1]['files'] = self._files_for_archive(unpacked)
+
+        return result
+
+    def file(self, path=None, **kwargs):
+        candidates = self.find_archive(**kwargs)
+        result = {}
+        for config, archives in candidates.items():
+            result[config] = []
+            for archive in archives:
+                file = None
+
+                with self.open_zipfile(archive['archive']) as unpacked:
+                    master, files = self._files_for_archive(unpacked)
+                    if not path:
+                        file = files
+                    elif path[-1] == '/':
+                        file = [f[len(path):] for f in files if f.startswith(path)]
+                    elif path in files:
+                        file = unpacked.open(master + path).read()
+
+                if file:
+                    result[config].append(dict(
+                        uuid=archive['uuid'],
+                        start_time=archive['start_time'],
+                        file=file,
+                    ))
+
+        return result
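
A note on the archive layout that ls() and file() assume: _files_for_archive()
skips entries beginning with '__' (for example, __MACOSX metadata), treats the
first remaining entry as a common prefix, and returns every other entry relative
to that prefix. A minimal sketch of a compatible in-memory archive follows; the
directory and file names are illustrative, and _files_for_archive is a private
helper called here only to show the behavior.

    import io
    import zipfile

    from resultsdbpy.model.archive_context import ArchiveContext

    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, mode='w') as z:
        z.writestr('archive/', '')                        # top-level directory becomes the prefix
        z.writestr('archive/file.txt', 'data')
        z.writestr('archive/index.html', '<html></html>')

    with ArchiveContext.open_zipfile(buffer) as unpacked:
        prefix, files = ArchiveContext._files_for_archive(unpacked)
        # prefix == 'archive/', files == ['file.txt', 'index.html']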

Added: trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context_unittest.py (0 => 248979)


--- trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context_unittest.py	                        (rev 0)
+++ trunk/Tools/resultsdbpy/resultsdbpy/model/archive_context_unittest.py	2019-08-22 00:21:50 UTC (rev 248979)
@@ -0,0 +1,90 @@
+# Copyright (C) 2019 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1.  Redistributions of source code must retain the above copyright
+#     notice, this list of conditions and the following disclaimer.
+# 2.  Redistributions in binary form must reproduce the above copyright
+#     notice, this list of conditions and the following disclaimer in the
+#     documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import base64
+
+from fakeredis import FakeStrictRedis
+from redis import StrictRedis
+from resultsdbpy.controller.configuration import Configuration
+from resultsdbpy.model.cassandra_context import CassandraContext
+from resultsdbpy.model.mock_cassandra_context import MockCassandraContext
+from resultsdbpy.model.mock_model_factory import MockModelFactory
+from resultsdbpy.model.mock_repository import MockSVNRepository
+from resultsdbpy.model.wait_for_docker_test_case import WaitForDockerTestCase
+
+
+class ArchiveContextTest(WaitForDockerTestCase):
+    KEYSPACE = 'archive_test_keyspace'
+
+    def init_database(self, redis=StrictRedis, cassandra=CassandraContext):
+        cassandra.drop_keyspace(keyspace=self.KEYSPACE)
+        self.model = MockModelFactory.create(redis=redis(), cassandra=cassandra(keyspace=self.KEYSPACE, create_keyspace=True))
+        MockModelFactory.add_mock_archives(self.model)
+
+    @WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
+    def test_find_archive(self, redis=StrictRedis, cassandra=CassandraContext):
+        self.init_database(redis=redis, cassandra=cassandra)
+        archives = self.model.archive_context.find_archive(
+            configurations=[Configuration(platform='Mac', style='Release', flavor='wk1')],
+            begin=MockSVNRepository.webkit().commit_for_id(236542), end=MockSVNRepository.webkit().commit_for_id(236542),
+            suite='layout-tests',
+        )
+        self.assertEqual(len(next(iter(archives.values()))), 1)
+        self.assertEqual(next(iter(archives.values()))[0]['uuid'], 153804910800)
+        self.assertEqual(next(iter(archives.values()))[0]['archive'].getvalue(), base64.b64decode(MockModelFactory.ARCHIVE_ZIP))
+
+    @WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
+    def test_archive_list(self, redis=StrictRedis, cassandra=CassandraContext):
+        self.init_database(redis=redis, cassandra=cassandra)
+        files = self.model.archive_context.ls(
+            configurations=[Configuration(platform='Mac', style='Release', flavor='wk1')],
+            begin=MockSVNRepository.webkit().commit_for_id(236542), end=MockSVNRepository.webkit().commit_for_id(236542),
+            suite='layout-tests',
+        )
+        self.assertEqual(len(next(iter(files.values()))), 1)
+        self.assertEqual(next(iter(files.values()))[0]['uuid'], 153804910800)
+        self.assertEqual(next(iter(files.values()))[0]['files'], ['file.txt', 'index.html'])
+
+    @WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
+    def test_file_access(self, redis=StrictRedis, cassandra=CassandraContext):
+        self.init_database(redis=redis, cassandra=cassandra)
+        files = self.model.archive_context.file(
+            path='file.txt',
+            configurations=[Configuration(platform='Mac', style='Release', flavor='wk1')],
+            begin=MockSVNRepository.webkit().commit_for_id(236542), end=MockSVNRepository.webkit().commit_for_id(236542),
+            suite='layout-tests',
+        )
+        self.assertEqual(len(next(iter(files.values()))), 1)
+        self.assertEqual(next(iter(files.values()))[0]['uuid'], 153804910800)
+        self.assertEqual(next(iter(files.values()))[0]['file'], 'data'.encode('ascii'))
+
+    @WaitForDockerTestCase.mock_if_no_docker(mock_redis=FakeStrictRedis, mock_cassandra=MockCassandraContext)
+    def test_file_list(self, redis=StrictRedis, cassandra=CassandraContext):
+        self.init_database(redis=redis, cassandra=cassandra)
+        files = self.model.archive_context.file(
+            configurations=[Configuration(platform='Mac', style='Release', flavor='wk1')],
+            begin=MockSVNRepository.webkit().commit_for_id(236542), end=MockSVNRepository.webkit().commit_for_id(236542),
+            suite='layout-tests',
+        )
+        self.assertEqual(len(next(iter(files.values()))), 1)
+        self.assertEqual(next(iter(files.values()))[0]['uuid'], 153804910800)
+        self.assertEqual(next(iter(files.values()))[0]['file'], ['file.txt', 'index.html'])

Modified: trunk/Tools/resultsdbpy/resultsdbpy/model/mock_model_factory.py (248978 => 248979)


--- trunk/Tools/resultsdbpy/resultsdbpy/model/mock_model_factory.py	2019-08-22 00:11:28 UTC (rev 248978)
+++ trunk/Tools/resultsdbpy/resultsdbpy/model/mock_model_factory.py	2019-08-22 00:21:50 UTC (rev 248979)
@@ -20,6 +20,8 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
+import base64
+import io
 import time
 
 import calendar
@@ -30,6 +32,32 @@
 
 
 class MockModelFactory(object):
+    ARCHIVE_ZIP = """UEsDBAoAAAAAAAtSBU8AAAAAAAAAAAAAAAAIABAAYXJjaGl2ZS9VWAwAZ2RIXWZkSF31ARQAUEsDBBQACAAIAA9SBU8AAAAAAAAAAAAAAAAQABAAYXJjaGl2ZS9maWxlLnR4dFVYDABovU1d
+    bmRIXfUBFABLSSxJBABQSwcIY/PzrQYAAAAEAAAAUEsDBAoAAAAAABRdCU8AAAAAAAAAAAAAAAAJABAAX19NQUNPU1gvVVgMACi+TV0ovk1d9QEUAFBLAwQKAAAAAAAUXQlPAAAAAAAA
+    AAAAAAAAEQAQAF9fTUFDT1NYL2FyY2hpdmUvVVgMACi+TV0ovk1d9QEUAFBLAwQUAAgACAAPUgVPAAAAAAAAAAAAAAAAGwAQAF9fTUFDT1NYL2FyY2hpdmUvLl9maWxlLnR4dFVYDABo
+    vU1dbmRIXfUBFABjYBVjZ2BiYPBNTFbwD1aIUIACkBgDJxAbMTAwegFpIJ+xhoEo4BgSEgRhgXXcAeIFaEqYoeICDAxSyfm5eokFBTmpejmJxSWlxakpKYklqcoBwVC1b4DYg4GBH6Eu
+    NzE5B2K+CUROFCFXWJpYlJhXkpmXypCd4hELUsUaKK4AVs0w95H9l352x+37375yVmg4n0+cf9BBob6BgYWxtWmKSUpSipGxtWNRckZmWWpMhZFBaElmTmZJpbWBs6GzkbOzpa6FpamF
+    romRm6Wuk7mFi66FqZuxiamLhauriSsDAFBLBwjEE3dr4AAAAHwBAABQSwMEFAAIAAgAzFwJTwAAAAAAAAAAAAAAABIAEABhcmNoaXZlL2luZGV4Lmh0bWxVWAwAor1NXaC9TV31ARQA
+    tVNdb+IwEHz3r9j2qZUCvfbt7hCSSQxYCnHOdsrxmBK3tRRilJj2+u9vbajKfejeDgliMruzM7PJ5GI0IpC6/Vtvn549XKXXcPfp9jPQ/b41wLvtGGjbQkQH6M1g+hfTjAkBaRo7+N4+
+    HLx1HdRdA4fBgO1gcId+a+KdB9vV/Rs8un43JPBq/TO4Pl7dwRPYucY+2m0dGBKoewN70++s96aBfe9ebIMH/1x7/DHI0rbu1XZPsHVdY0PTQGLXzvgvBG7Hv4kawD2+q9m6BusOg0cT
+    vkaVgbF+cC8BOtkngJ/Oebs1CeJ2gBbZAsnHwGjrVzU4ctvWdmf6MYG7P0XgsLMc3kWgv+aAwv6HDjj6izyN2x52pvP1+5pucAMO0R52tTe9rdvhI+y4okB7biGsWy+5AiXmek0lAzyX
+    UtzzjGUw2wAtyxxvFukYLqlC9BJokeF3Q4B9LyVTCoQEvipzjh1IIWmhOVNJaMqrjBeLBGaVhkJoyPmKayzTIsGxjPylD8QcVkymS/xLZzznehMnzrkuwrA5TqNQUql5WuVUEigrWQrF
+    IKjPuEpzylcsGwMKwKHA7lmhQS1pnp+7EdiZikJLjuKEVDBjKI/OEI8jig2SSZbqYOTjlGIwKCxPQJUs5XgIOTC0QeUmCVEgqWLfKqxCFDK6ogt0dfXvNEgIPa0kWwWxGIGqZkpzXWkG
+    CyGyGLJi8p6nTH2FXKgYVKVYgiM0TaIf5MCYEMfiWaV4DIwXmklZlZqL4hqWYo2BoEqKvVlMVhTRLe5DSNwq0oYcYvIJrJcMARmyjGnREIPC1FJ9XoYDMURNznxCwRY5X7AiZQEWgWbN
+    FbvGRXEVCvhx8JpuQFTRNdYET+R4Pnssk7hG4HOg2T0Pyk/VuHnFT49JjC1dnjIfk9FoSsjk2e/aKV5M3Zh+OvHWt2Zqu8b8GAdocnO8M7k5VZDJg2vepvENWxp8A+HV9W1zQSY3RwAr
+    A+VPUEsHCPbdMMviAgAAYQUAAFBLAwQUAAgACADMXAlPAAAAAAAAAAAAAAAAHQAQAF9fTUFDT1NYL2FyY2hpdmUvLl9pbmRleC5odG1sVVgMAKK9TV2gvU1d9QEUAGNgFWNnYGJg8E1M
+    VvAPVohQgAKQGAMnEBsxMDB6AWkgn7GGgSjgGBISBGGBddwB4gVoSpih4gIMDFLJ+bl6iQUFOal6OYnFJaXFqSkpiSWpygHBULVvgNiDgYEfoS43MTkHYr4JRE4UIVdYmliUmFeSmZfK
+    UL/XNxak6qLfEiGwaoa5j+y/9LM7bt//9pWzQsP5fOL8gw4K9Q0MLIytTVNMUpJSjIytHYuSMzLLUmMqjAxCSzJzMksqrQ2cDZ2NnJ0tdS0sTS10TYzcLHWdzC1cdC1M3YxNTF0sXF1N
+    XBkAUEsHCLRBGwrgAAAAfAEAAFBLAwQUAAgACAALUgVPAAAAAAAAAAAAAAAAEgAQAF9fTUFDT1NYLy5fYXJjaGl2ZVVYDABnZEhdZmRIXfUBFABjYBVjZ2BiYPBNTFbwD1aIUIACkBgD
+    JxAbMTAwCgFpIJ/RhYEo4BgSEgRhgXVsAeIJaEqYoOIeDAz8yfm5eokFBTmpermJyTkQ+T8QOVGEXGFpYlFiXklmXioDI0Ntye3fifMcHKZ8fXTEZauLLSPD3Ef2X/rZHbfvf/vKWaHh
+    fD4x7izUNzCwMLY2gAJrx6LkjMyy1JgKI4PQksyczJJKawNnQ2cjZ2dLXQtLUwtdEyM3S10ncwsXXQtTN2MTUxcLV1cTVwYAUEsHCAAolTbHAAAARAEAAFBLAQIVAwoAAAAAAAtSBU8A
+    AAAAAAAAAAAAAAAIAAwAAAAAAAAAAEDtQQAAAABhcmNoaXZlL1VYCABnZEhdZmRIXVBLAQIVAxQACAAIAA9SBU9j8/OtBgAAAAQAAAAQAAwAAAAAAAAAAECkgTYAAABhcmNoaXZlL2Zp
+    bGUudHh0VVgIAGi9TV1uZEhdUEsBAhUDCgAAAAAAFF0JTwAAAAAAAAAAAAAAAAkADAAAAAAAAAAAQP1BigAAAF9fTUFDT1NYL1VYCAAovk1dKL5NXVBLAQIVAwoAAAAAABRdCU8AAAAA
+    AAAAAAAAAAARAAwAAAAAAAAAAED9QcEAAABfX01BQ09TWC9hcmNoaXZlL1VYCAAovk1dKL5NXVBLAQIVAxQACAAIAA9SBU/EE3dr4AAAAHwBAAAbAAwAAAAAAAAAAECkgQABAABfX01B
+    Q09TWC9hcmNoaXZlLy5fZmlsZS50eHRVWAgAaL1NXW5kSF1QSwECFQMUAAgACADMXAlP9t0wy+ICAABhBQAAEgAMAAAAAAAAAABApIE5AgAAYXJjaGl2ZS9pbmRleC5odG1sVVgIAKK9
+    TV2gvU1dUEsBAhUDFAAIAAgAzFwJT7RBGwrgAAAAfAEAAB0ADAAAAAAAAAAAQKSBawUAAF9fTUFDT1NYL2FyY2hpdmUvLl9pbmRleC5odG1sVVgIAKK9TV2gvU1dUEsBAhUDFAAIAAgA
+    C1IFTwAolTbHAAAARAEAABIADAAAAAAAAAAAQKSBpgYAAF9fTUFDT1NYLy5fYXJjaGl2ZVVYCABnZEhdZmRIXVBLBQYAAAAACAAIAF4CAAC9BwAAAAA="""
+    THREE_WEEKS = 60 * 60 * 24 * 21
 
     @classmethod
     def create(cls, redis, cassandra, async_processing=False):
@@ -116,7 +144,7 @@
 
         with model.upload_context:
             current = time.time()
-            old = current - 60 * 60 * 24 * 21
+            old = current - cls.THREE_WEEKS
             for complete_configuration in configurations:
                 if complete_configuration != configuration:
                     continue
@@ -129,7 +157,7 @@
                 cls.iterate_all_commits(model, lambda commits: model.upload_context.upload_test_results(complete_configuration, commits, suite=suite, test_results=test_results, timestamp=timestamp_to_use))
 
     @classmethod
-    def process_results(self, model, configuration=Configuration(), suite='layout-tests'):
+    def process_results(cls, model, configuration=Configuration(), suite='layout-tests'):
         configurations = [configuration] if configuration.is_complete() else ConfigurationContextTest.CONFIGURATIONS
 
         with model.upload_context:
@@ -147,3 +175,22 @@
                                 configuration=config, commits=result['commits'], suite=suite,
                                 test_results=result['test_results'], timestamp=result['timestamp'],
                             )
+
+    @classmethod
+    def add_mock_archives(cls, model, configuration=Configuration(), suite='layout-tests', archive=None):
+        archive = archive or io.BytesIO(base64.b64decode(cls.ARCHIVE_ZIP))
+        configurations = [configuration] if configuration.is_complete() else ConfigurationContextTest.CONFIGURATIONS
+
+        with model.upload_context:
+            current = time.time()
+            old = current - cls.THREE_WEEKS
+            for complete_configuration in configurations:
+                if complete_configuration != configuration:
+                    continue
+
+                timestamp_to_use = current
+                if (complete_configuration.platform == 'Mac' and complete_configuration.version <= Configuration.version_to_integer('10.13')) \
+                   or (complete_configuration.platform == 'iOS' and complete_configuration.version <= Configuration.version_to_integer('11')):
+                    timestamp_to_use = old
+
+                cls.iterate_all_commits(model, lambda commits: model.archive_context.register(archive, complete_configuration, commits, suite=suite, timestamp=timestamp_to_use))

Modified: trunk/Tools/resultsdbpy/resultsdbpy/model/model.py (248978 => 248979)


--- trunk/Tools/resultsdbpy/resultsdbpy/model/model.py	2019-08-22 00:11:28 UTC (rev 248978)
+++ trunk/Tools/resultsdbpy/resultsdbpy/model/model.py	2019-08-22 00:21:50 UTC (rev 248979)
@@ -23,6 +23,7 @@
 import traceback
 import sys
 
+from resultsdbpy.model.archive_context import ArchiveContext
 from resultsdbpy.model.ci_context import CIContext
 from resultsdbpy.model.commit_context import CommitContext
 from resultsdbpy.model.configuration_context import ConfigurationContext
@@ -74,6 +75,12 @@
         for context in [self.suite_context, self.test_context, self.ci_context]:
             self.upload_context.register_upload_callback(context.name, context.register)
 
+        self.archive_context = ArchiveContext(
+            configuration_context=self.configuration_context,
+            commit_context=self.commit_context,
+            ttl_seconds=self.default_ttl_seconds,
+        )
+
     def do_work(self):
         if not self._async_processing:
             raise RuntimeError('No work to be done, asynchronous processing disabled')