Hello community,

here is the log from the commit of package buildstream for openSUSE:Factory 
checked in at 2020-05-15 23:52:18
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/buildstream (Old)
 and      /work/SRC/openSUSE:Factory/.buildstream.new.2738 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "buildstream"

Fri May 15 23:52:18 2020 rev:15 rq:805782 version:1.4.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/buildstream/buildstream.changes  2020-04-02 
17:43:31.753404104 +0200
+++ /work/SRC/openSUSE:Factory/.buildstream.new.2738/buildstream.changes        
2020-05-15 23:52:19.921545924 +0200
@@ -1,0 +2,15 @@
+Wed May 13 19:53:32 UTC 2020 - Bjørn Lie <[email protected]>
+
+- Update to version 1.4.3:
+  + Fix support for conditional list append/prepend in
+    project.conf.
+  + Fix internal imports to import from "collections.abc" instead
+    of "collections", this improves support for Python 3.8.
+  + Fix some downloads from gitlab.com by setting custom user
+    agent.
+  + Work around python API break from ostree's
+    repo.remote_gpg_import(), this was changed in ostree commit
+    v2019.2-10-gaa5df899, and we now have a fallback to support
+    both versions of the API.
+
+-------------------------------------------------------------------

Old:
----
  BuildStream-1.4.2.tar.xz

New:
----
  BuildStream-1.4.3.tar.xz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ buildstream.spec ++++++
--- /var/tmp/diff_new_pack.VOsx0l/_old  2020-05-15 23:52:21.621549200 +0200
+++ /var/tmp/diff_new_pack.VOsx0l/_new  2020-05-15 23:52:21.625549207 +0200
@@ -19,7 +19,7 @@
 %define real_name BuildStream
 
 Name:           buildstream
-Version:        1.4.2
+Version:        1.4.3
 Release:        0
 Summary:        A framework for modelling build pipelines in YAML
 License:        LGPL-2.1-or-later

++++++ BuildStream-1.4.2.tar.xz -> BuildStream-1.4.3.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/BuildStream.egg-info/PKG-INFO 
new/BuildStream-1.4.3/BuildStream.egg-info/PKG-INFO
--- old/BuildStream-1.4.2/BuildStream.egg-info/PKG-INFO 2020-04-01 
16:47:06.000000000 +0200
+++ new/BuildStream-1.4.3/BuildStream.egg-info/PKG-INFO 2020-05-13 
12:38:33.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: BuildStream
-Version: 1.4.2
+Version: 1.4.3
 Summary: A framework for modelling build pipelines in YAML
 Home-page: https://gitlab.com/BuildStream/buildstream
 Author: BuildStream Developers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/BuildStream.egg-info/SOURCES.txt 
new/BuildStream-1.4.3/BuildStream.egg-info/SOURCES.txt
--- old/BuildStream-1.4.2/BuildStream.egg-info/SOURCES.txt      2020-04-01 
16:47:06.000000000 +0200
+++ new/BuildStream-1.4.3/BuildStream.egg-info/SOURCES.txt      2020-05-13 
12:38:33.000000000 +0200
@@ -324,6 +324,7 @@
 tests/format/include.py
 tests/format/include_composition.py
 tests/format/listdirectiveerrors.py
+tests/format/option-list-directive.py
 tests/format/optionarch.py
 tests/format/optionbool.py
 tests/format/optioneltmask.py
@@ -404,6 +405,7 @@
 tests/format/option-flags/project.conf
 tests/format/option-flags-missing/element.bst
 tests/format/option-flags-missing/project.conf
+tests/format/option-list-directive/project.conf
 tests/format/option-overrides/element.bst
 tests/format/option-overrides/project.conf
 tests/format/options/compound-and-condition/element.bst
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/NEWS new/BuildStream-1.4.3/NEWS
--- old/BuildStream-1.4.2/NEWS  2020-04-01 16:45:28.000000000 +0200
+++ new/BuildStream-1.4.3/NEWS  2020-05-13 12:31:19.000000000 +0200
@@ -1,4 +1,22 @@
 =================
+buildstream 1.4.3
+=================
+
+  o Fix support for conditional list append/prepend in project.conf,
+    Merge request !1857
+
+  o Fix internal imports to import from "collections.abc" instead
+    of "collections", this improves support for Python 3.8,
+    see issue #831
+
+  o Fix some downloads from gitlab.com by setting custom user agent,
+    fixes issue #1285
+
+  o Work around python API break from ostree's repo.remote_gpg_import(),
+    this was changed in ostree commit v2019.2-10-gaa5df899, and we now
+    have a fallback to support both versions of the API, see merge request 
!1917.
+
+=================
 buildstream 1.4.2
 =================
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/PKG-INFO 
new/BuildStream-1.4.3/PKG-INFO
--- old/BuildStream-1.4.2/PKG-INFO      2020-04-01 16:47:07.000000000 +0200
+++ new/BuildStream-1.4.3/PKG-INFO      2020-05-13 12:38:33.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: BuildStream
-Version: 1.4.2
+Version: 1.4.3
 Summary: A framework for modelling build pipelines in YAML
 Home-page: https://gitlab.com/BuildStream/buildstream
 Author: BuildStream Developers
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/BuildStream-1.4.2/buildstream/_artifactcache/cascache.py 
new/BuildStream-1.4.3/buildstream/_artifactcache/cascache.py
--- old/BuildStream-1.4.2/buildstream/_artifactcache/cascache.py        
2020-04-01 15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/buildstream/_artifactcache/cascache.py        
2020-05-12 10:35:41.000000000 +0200
@@ -49,6 +49,46 @@
 _MAX_PAYLOAD_BYTES = 1024 * 1024
 
 
+class _Attempt():
+
+    def __init__(self, last_attempt=False):
+        self.__passed = None
+        self.__last_attempt = last_attempt
+
+    def passed(self):
+        return self.__passed
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        try:
+            if exc_type is None:
+                self.__passed = True
+            else:
+                self.__passed = False
+                if exc_value is not None:
+                    raise exc_value
+        except grpc.RpcError as e:
+            if e.code() == grpc.StatusCode.UNAVAILABLE:
+                return not self.__last_attempt
+            elif e.code() == grpc.StatusCode.ABORTED:
+                raise CASRemoteError("grpc aborted: {}".format(str(e)),
+                                     detail=e.details(),
+                                     temporary=True) from e
+            else:
+                return False
+        return False
+
+
+def _retry(tries=5):
+    for a in range(tries):
+        attempt = _Attempt(last_attempt=(a == tries - 1))
+        yield attempt
+        if attempt.passed():
+            break
+
+
 class BlobNotFound(ArtifactError):
 
     def __init__(self, blob, msg):
@@ -248,7 +288,9 @@
 
                 request = buildstream_pb2.GetReferenceRequest()
                 request.key = ref
-                response = remote.ref_storage.GetReference(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = remote.ref_storage.GetReference(request)
 
                 tree = remote_execution_pb2.Digest()
                 tree.hash = response.digest.hash
@@ -296,7 +338,9 @@
                 try:
                     request = buildstream_pb2.GetReferenceRequest()
                     request.key = ref
-                    response = remote.ref_storage.GetReference(request)
+                    for attempt in _retry():
+                        with attempt:
+                            response = remote.ref_storage.GetReference(request)
 
                     if response.digest.hash == tree.hash and 
response.digest.size_bytes == tree.size_bytes:
                         # ref is already on the server with the same tree
@@ -313,7 +357,9 @@
                 request.keys.append(ref)
                 request.digest.hash = tree.hash
                 request.digest.size_bytes = tree.size_bytes
-                remote.ref_storage.UpdateReference(request)
+                for attempt in _retry():
+                    with attempt:
+                        remote.ref_storage.UpdateReference(request)
 
                 skipped_remote = False
         except grpc.RpcError as e:
@@ -786,7 +832,9 @@
             remote.init()
 
             request = buildstream_pb2.StatusRequest()
-            response = remote.ref_storage.Status(request)
+            for attempt in _retry():
+                with attempt:
+                    response = remote.ref_storage.Status(request)
 
             if remote_spec.push and not response.allow_updates:
                 q.put('Artifact server does not allow push')
@@ -986,7 +1034,9 @@
                 offset += chunk_size
                 finished = request.finish_write
 
-        response = remote.bytestream.Write(request_stream(resource_name, 
stream))
+        for attempt in _retry():
+            with attempt:
+                response = 
remote.bytestream.Write(request_stream(resource_name, stream))
 
         assert response.committed_size == digest.size_bytes
 
@@ -1003,7 +1053,9 @@
                 d.hash = required_digest.hash
                 d.size_bytes = required_digest.size_bytes
 
-            response = remote.cas.FindMissingBlobs(request)
+            for attempt in _retry():
+                with attempt:
+                    response = remote.cas.FindMissingBlobs(request)
             for missing_digest in response.missing_blob_digests:
                 d = remote_execution_pb2.Digest()
                 d.hash = missing_digest.hash
@@ -1089,7 +1141,9 @@
             self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
             try:
                 request = remote_execution_pb2.GetCapabilitiesRequest()
-                response = self.capabilities.GetCapabilities(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = self.capabilities.GetCapabilities(request)
                 server_max_batch_total_size_bytes = 
response.cache_capabilities.max_batch_total_size_bytes
                 if 0 < server_max_batch_total_size_bytes < 
self.max_batch_total_size_bytes:
                     self.max_batch_total_size_bytes = 
server_max_batch_total_size_bytes
@@ -1102,7 +1156,9 @@
             self.batch_read_supported = False
             try:
                 request = remote_execution_pb2.BatchReadBlobsRequest()
-                response = self.cas.BatchReadBlobs(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = self.cas.BatchReadBlobs(request)
                 self.batch_read_supported = True
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.UNIMPLEMENTED:
@@ -1112,7 +1168,9 @@
             self.batch_update_supported = False
             try:
                 request = remote_execution_pb2.BatchUpdateBlobsRequest()
-                response = self.cas.BatchUpdateBlobs(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = self.cas.BatchUpdateBlobs(request)
                 self.batch_update_supported = True
             except grpc.RpcError as e:
                 if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
@@ -1153,7 +1211,9 @@
         if len(self._request.digests) == 0:
             return
 
-        batch_response = self._remote.cas.BatchReadBlobs(self._request)
+        for attempt in _retry():
+            with attempt:
+                batch_response = self._remote.cas.BatchReadBlobs(self._request)
 
         for response in batch_response.responses:
             if response.status.code == grpc.StatusCode.NOT_FOUND.value[0]:
@@ -1201,7 +1261,9 @@
         if len(self._request.requests) == 0:
             return
 
-        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
+        for attempt in _retry():
+            with attempt:
+                batch_response = 
self._remote.cas.BatchUpdateBlobs(self._request)
 
         for response in batch_response.responses:
             if response.status.code != grpc.StatusCode.OK.value[0]:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/buildstream/_frontend/complete.py 
new/BuildStream-1.4.3/buildstream/_frontend/complete.py
--- old/BuildStream-1.4.2/buildstream/_frontend/complete.py     2020-04-01 
15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/buildstream/_frontend/complete.py     2020-05-12 
10:35:41.000000000 +0200
@@ -31,7 +31,7 @@
 #  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 #  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
-import collections
+import collections.abc
 import copy
 import os
 
@@ -218,7 +218,7 @@
         return True
     if cmd_param.nargs == -1:
         return True
-    if isinstance(current_param_values, collections.Iterable) \
+    if isinstance(current_param_values, collections.abc.Iterable) \
             and cmd_param.nargs > 1 and len(current_param_values) < 
cmd_param.nargs:
         return True
     return False
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/buildstream/_ostree.py 
new/BuildStream-1.4.3/buildstream/_ostree.py
--- old/BuildStream-1.4.2/buildstream/_ostree.py        2020-04-01 
15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/buildstream/_ostree.py        2020-05-12 
10:35:41.000000000 +0200
@@ -271,6 +271,20 @@
         try:
             gfile = Gio.File.new_for_uri(key_url)
             stream = gfile.read()
-            repo.remote_gpg_import(remote, stream, None, 0, None)
+
+            # In ostree commit `v2019.2-10-gaa5df899`, the python
+            # facing API was changed by way of modifying the
+            # introspection annotations.
+            #
+            # This means we need to call this API in two different
+            # ways depending on which ostree version is installed.
+            #
+            try:
+                # New API
+                repo.remote_gpg_import(remote, stream, None, None)
+            except TypeError:
+                # Old API
+                repo.remote_gpg_import(remote, stream, None, 0, None)
+
         except GLib.GError as e:
             raise OSTreeError("Failed to add gpg key from url '{}': 
{}".format(key_url, e.message)) from e
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/buildstream/_project.py 
new/BuildStream-1.4.3/buildstream/_project.py
--- old/BuildStream-1.4.2/buildstream/_project.py       2020-04-01 
15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/buildstream/_project.py       2020-05-13 
12:19:40.000000000 +0200
@@ -562,15 +562,6 @@
     def _load_pass(self, config, output, *,
                    ignore_unknown=False):
 
-        # Element and Source  type configurations will be composited later onto
-        # element/source types, so we delete it from here and run our final
-        # assertion after.
-        output.element_overrides = _yaml.node_get(config, Mapping, 'elements', 
default_value={})
-        output.source_overrides = _yaml.node_get(config, Mapping, 'sources', 
default_value={})
-        config.pop('elements', None)
-        config.pop('sources', None)
-        _yaml.node_final_assertions(config)
-
         self._load_plugin_factories(config, output)
 
         # Load project options
@@ -594,11 +585,16 @@
         # Now resolve any conditionals in the remaining configuration,
         # any conditionals specified for project option declarations,
         # or conditionally specifying the project name; will be ignored.
-        #
-        # Don't forget to also resolve options in the element and source 
overrides.
         output.options.process_node(config)
-        output.options.process_node(output.element_overrides)
-        output.options.process_node(output.source_overrides)
+
+        # Element and Source  type configurations will be composited later onto
+        # element/source types, so we delete it from here and run our final
+        # assertion after.
+        output.element_overrides = _yaml.node_get(config, Mapping, 'elements', 
default_value={})
+        output.source_overrides = _yaml.node_get(config, Mapping, 'sources', 
default_value={})
+        config.pop('elements', None)
+        config.pop('sources', None)
+        _yaml.node_final_assertions(config)
 
         # Load base variables
         output.base_variables = _yaml.node_get(config, Mapping, 'variables')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/buildstream/_version.py 
new/BuildStream-1.4.3/buildstream/_version.py
--- old/BuildStream-1.4.2/buildstream/_version.py       2020-04-01 
16:47:07.000000000 +0200
+++ new/BuildStream-1.4.3/buildstream/_version.py       2020-05-13 
12:38:33.000000000 +0200
@@ -8,11 +8,11 @@
 
 version_json = '''
 {
- "date": "2020-04-01T14:45:22+0000",
+ "date": "2020-05-13T19:31:51+0900",
  "dirty": false,
  "error": null,
- "full-revisionid": "506e1723efc0afbd09361df3c050c88201fdd268",
- "version": "1.4.2"
+ "full-revisionid": "89765b759230c8d47e27fd52527cf3df61be9ae1",
+ "version": "1.4.3"
 }
 '''  # END VERSION_JSON
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/buildstream/_yaml.py 
new/BuildStream-1.4.3/buildstream/_yaml.py
--- old/BuildStream-1.4.2/buildstream/_yaml.py  2020-04-01 15:49:07.000000000 
+0200
+++ new/BuildStream-1.4.3/buildstream/_yaml.py  2020-05-12 10:35:41.000000000 
+0200
@@ -280,7 +280,7 @@
         provenance.members[key] = member
 
         target_value = target.get(key)
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_decorate_dict(filename, target_value, value, toplevel)
         elif isinstance(value, list):
             member.elements = node_decorate_list(filename, target_value, 
value, toplevel)
@@ -295,7 +295,7 @@
         target_item = target[idx]
         element = ElementProvenance(filename, source, idx, toplevel)
 
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             node_decorate_dict(filename, target_item, item, toplevel)
         elif isinstance(item, list):
             element.elements = node_decorate_list(filename, target_item, item, 
toplevel)
@@ -569,7 +569,7 @@
 #
 def is_composite_list(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
         has_directives = False
         has_keys = False
 
@@ -838,7 +838,7 @@
 
         target_value = target.get(key)
 
-        if isinstance(source_value, collections.Mapping):
+        if isinstance(source_value, collections.abc.Mapping):
 
             # Handle creating new dicts on target side
             if target_value is None:
@@ -853,7 +853,7 @@
                 # Add a new provenance member element to the containing dict
                 target_provenance.members[key] = source_provenance.members[key]
 
-            if not isinstance(target_value, collections.Mapping):
+            if not isinstance(target_value, collections.abc.Mapping):
                 raise CompositeTypeError(thispath, type(target_value), 
type(source_value))
 
             # Recurse into matching dictionary
@@ -914,7 +914,7 @@
 #
 def node_sanitize(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
 
         result = SanitizedDict()
 
@@ -1052,7 +1052,7 @@
 def node_chain_copy(source):
     copy = ChainMap({}, source)
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_chain_copy(value)
         elif isinstance(value, list):
             copy[key] = list_chain_copy(value)
@@ -1065,7 +1065,7 @@
 def list_chain_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_chain_copy(item))
         elif isinstance(item, list):
             copy.append(list_chain_copy(item))
@@ -1080,7 +1080,7 @@
 def node_copy(source):
     copy = {}
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_copy(value)
         elif isinstance(value, list):
             copy[key] = list_copy(value)
@@ -1097,7 +1097,7 @@
 def list_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_copy(item))
         elif isinstance(item, list):
             copy.append(list_copy(item))
@@ -1132,7 +1132,7 @@
             raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
                             "{}: Attempt to override non-existing 
list".format(provenance))
 
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)
@@ -1140,7 +1140,7 @@
 
 def list_final_assertions(values):
     for value in values:
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/BuildStream-1.4.2/buildstream/plugins/sources/_downloadablefilesource.py 
new/BuildStream-1.4.3/buildstream/plugins/sources/_downloadablefilesource.py
--- 
old/BuildStream-1.4.2/buildstream/plugins/sources/_downloadablefilesource.py    
    2020-04-01 15:49:07.000000000 +0200
+++ 
new/BuildStream-1.4.3/buildstream/plugins/sources/_downloadablefilesource.py    
    2020-05-12 10:35:41.000000000 +0200
@@ -107,6 +107,7 @@
                 default_name = os.path.basename(self.url)
                 request = urllib.request.Request(self.url)
                 request.add_header('Accept', '*/*')
+                request.add_header('User-Agent', 'BuildStream/1')
 
                 # We do not use etag in case what we have in cache is
                 # not matching ref in order to be able to recover from
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/BuildStream-1.4.2/tests/format/option-list-directive/project.conf 
new/BuildStream-1.4.3/tests/format/option-list-directive/project.conf
--- old/BuildStream-1.4.2/tests/format/option-list-directive/project.conf       
1970-01-01 01:00:00.000000000 +0100
+++ new/BuildStream-1.4.3/tests/format/option-list-directive/project.conf       
2020-05-12 10:35:41.000000000 +0200
@@ -0,0 +1,18 @@
+name: test
+
+options:
+  shell_mount_devices:
+    type: bool
+    description: whether to mount devices in the shell
+    default: false
+
+shell:
+  host-files:
+  - '/etc/passwd'
+  - '/etc/group'
+
+  (?):
+  - shell_mount_devices:
+      host-files:
+        (>):
+        - '/dev/dri'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/BuildStream-1.4.2/tests/format/option-list-directive.py 
new/BuildStream-1.4.3/tests/format/option-list-directive.py
--- old/BuildStream-1.4.2/tests/format/option-list-directive.py 1970-01-01 
01:00:00.000000000 +0100
+++ new/BuildStream-1.4.3/tests/format/option-list-directive.py 2020-05-12 
10:35:41.000000000 +0200
@@ -0,0 +1,16 @@
+import os
+import pytest
+from tests.testutils.runcli import cli
+
+# Project directory
+DATA_DIR = os.path.dirname(os.path.realpath(__file__))
+
+
[email protected](DATA_DIR)
[email protected]("mount_devices", [("true"), ("false")])
+def test_override(cli, datafiles, mount_devices):
+    project = os.path.join(datafiles.dirname, datafiles.basename, 
"option-list-directive")
+
+    bst_args = ["--option", "shell_mount_devices", mount_devices, "build"]
+    result = cli.run(project=project, silent=True, args=bst_args)
+    result.assert_success()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/tests/sources/ostree.py 
new/BuildStream-1.4.3/tests/sources/ostree.py
--- old/BuildStream-1.4.2/tests/sources/ostree.py       2020-04-01 
15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/tests/sources/ostree.py       2020-05-12 
10:35:41.000000000 +0200
@@ -55,3 +55,40 @@
     result = cli.run(project=project, args=['show', 'target.bst'])
     result.assert_main_error(ErrorDomain.SOURCE, "missing-track-and-ref")
     result.assert_task_error(None, None)
+
+
[email protected](os.path.join(DATA_DIR, 'template'))
+def test_fetch_gpg_verify(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    gpg_homedir = os.path.join(DATA_DIR, "gpghome")
+
+    # Create the repo from 'repofiles' subdir
+    repo = create_repo('ostree', str(tmpdir))
+    ref = repo.create(
+        os.path.join(project, 'repofiles'),
+        gpg_sign="FFFF54C070353B52D046DEB087FA0F41A6EFD9E9",
+        gpg_homedir=gpg_homedir
+    )
+
+    # Write out our test target
+    ostreesource = repo.source_config(ref=ref, gpg_key='test.gpg')
+    element = {
+        'kind': 'import',
+        'sources': [
+            ostreesource
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(project, 'target.bst'))
+
+    # Assert that a fetch is needed
+    assert cli.get_element_state(project, 'target.bst') == 'fetch needed'
+
+    # Now try to fetch it
+    result = cli.run(project=project, args=['fetch', 'target.bst'])
+    result.assert_success()
+
+    # Assert that we are now buildable because the source is
+    # now cached.
+    assert cli.get_element_state(project, 'target.bst') == 'buildable'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/BuildStream-1.4.2/tests/testutils/repo/ostree.py 
new/BuildStream-1.4.3/tests/testutils/repo/ostree.py
--- old/BuildStream-1.4.2/tests/testutils/repo/ostree.py        2020-04-01 
15:49:07.000000000 +0200
+++ new/BuildStream-1.4.3/tests/testutils/repo/ostree.py        2020-05-12 
10:35:41.000000000 +0200
@@ -13,21 +13,31 @@
 
         super(OSTree, self).__init__(directory, subdir)
 
-    def create(self, directory):
+    def create(self, directory, *, gpg_sign=None, gpg_homedir=None):
         subprocess.call(['ostree', 'init',
                          '--repo', self.repo,
                          '--mode', 'archive-z2'])
-        subprocess.call(['ostree', 'commit',
-                         '--repo', self.repo,
-                         '--branch', 'master',
-                         '--subject', 'Initial commit',
-                         directory])
+
+        commit_args = ['ostree', 'commit',
+                       '--repo', self.repo,
+                       '--branch', 'master',
+                       '--subject', 'Initial commit']
+
+        if gpg_sign and gpg_homedir:
+            commit_args += [
+                '--gpg-sign={}'.format(gpg_sign),
+                '--gpg-homedir={}'.format(gpg_homedir)
+            ]
+
+        commit_args += [directory]
+
+        subprocess.call(commit_args)
 
         latest = self.latest_commit()
 
         return latest
 
-    def source_config(self, ref=None):
+    def source_config(self, ref=None, *, gpg_key=None):
         config = {
             'kind': 'ostree',
             'url': 'file://' + self.repo,
@@ -35,6 +45,8 @@
         }
         if ref is not None:
             config['ref'] = ref
+        if gpg_key is not None:
+            config['gpg-key'] = gpg_key
 
         return config
 


Reply via email to