Hello community,
here is the log from the commit of package openSUSE-release-tools for
openSUSE:Factory checked in at 2018-10-23 20:41:20
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/openSUSE-release-tools (Old)
and /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "openSUSE-release-tools"
Tue Oct 23 20:41:20 2018 rev:140 rq:643844 version:20181023.9b1618e
Changes:
--------
--- /work/SRC/openSUSE:Factory/openSUSE-release-tools/openSUSE-release-tools.changes 2018-10-22 11:23:32.143144258 +0200
+++ /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new/openSUSE-release-tools.changes 2018-10-23 20:42:24.836399257 +0200
@@ -1,0 +2,54 @@
+Tue Oct 23 04:42:56 UTC 2018 - [email protected]
+
+- Update to version 20181023.9b1618e:
+ * If OBS or openQA raise an exception, restart
+ * Require python-pika in CI
+ * First connect to AMQP then fetch initial state
+ * Refactored rabbit-openqa to be based on PubSubConsumer
+ * Adopt pika example on async
+ * Moved the ISO replace_string into OBS attributes
+ * Find a sed like syntax for Staging ISOs
+ * Package rabbit-openqa
+ * Make the names of the openQA checks unique
+ * No need to if loop - we do the same for all of openqa
+ * Link to failed test modules
+ * Fetch all openQA jobs for the ISO every time
+ * Gather initial buildid of repositories
+ * Add helper bot to listen to rabbit bus and feed OBS with openQA Status
+
+-------------------------------------------------------------------
+Sun Oct 21 14:51:39 UTC 2018 - [email protected]
+
+- Update to version 20181021.14837b3:
+ * pkglistgen: fix AttributeError since update_repos() moved to PkgListGen class
+ * osc-staging: select: document --move and --filter-from options.
+ * osc-staging: select: replace --from with --filter-from.
+
+-------------------------------------------------------------------
+Fri Oct 19 15:06:43 UTC 2018 - [email protected]
+
+- Update to version 20181019.649529a:
+ * metrics/grafana/openqa: set minimum interval of 10s to avoid gaps.
+
+-------------------------------------------------------------------
+Fri Oct 19 14:50:49 UTC 2018 - [email protected]
+
+- Update to version 20181019.7d58952:
+ * repo_checker: utilize 'staging' config option for instantiating StagingAPI.
+ * osclib/cycle: remove need for StagingApi instance in favor of apiurl.
+ * ReviewBot: staging_api(): allow for Staging subproject to be used.
+
+-------------------------------------------------------------------
+Fri Oct 19 09:03:54 UTC 2018 - [email protected]
+
+- Update to version 20181019.b184670:
+ * osclib/conf: include NonFree subproject in openSUSE patterns.
+ * osclib/conf: remove Ports subprojects as they no longer exist.
+
+-------------------------------------------------------------------
+Fri Oct 19 08:31:12 UTC 2018 - [email protected]
+
+- Update to version 20181019.61d9082:
+ * We need the images repo published to have a status check
+
+-------------------------------------------------------------------
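A note on the "If OBS or openQA raise an exception, restart" entry above: the new bot simply drops its AMQP connection on transient OBS/openQA errors, waits a moment and starts over. A minimal sketch of that pattern, with listener standing in for the Listener object that rabbit-openqa.py (included further down in this diff) constructs:

    from time import sleep

    def run_forever(listener):
        # keep the consumer alive across transient OBS/openQA failures
        while True:
            try:
                listener.run()
            except KeyboardInterrupt:
                listener.stop()
                return
            except Exception:
                # catch-all used only for illustration; the real bot catches
                # HTTPError, URLError, ConnectionError and SSLError
                sleep(10)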
Old:
----
openSUSE-release-tools-20181017.3282c9a.obscpio
New:
----
openSUSE-release-tools-20181023.9b1618e.obscpio
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ openSUSE-release-tools.spec ++++++
--- /var/tmp/diff_new_pack.PMG8hb/_old 2018-10-23 20:42:26.960396720 +0200
+++ /var/tmp/diff_new_pack.PMG8hb/_new 2018-10-23 20:42:26.964396715 +0200
@@ -20,7 +20,7 @@
%define source_dir openSUSE-release-tools
%define announcer_filename factory-package-news
Name: openSUSE-release-tools
-Version: 20181017.3282c9a
+Version: 20181023.9b1618e
Release: 0
Summary: Tools to aid in staging and release work for openSUSE/SUSE
License: GPL-2.0-or-later AND MIT
@@ -293,6 +293,16 @@
%description -n osc-plugin-vdelreq
OSC plugin to check for virtually accepted request, see `osc vdelreq --help`.
+%package rabbit-openqa
+Summary: Sync openQA Status Into OBS
+Group: Development/Tools/Other
+BuildArch: noarch
+Requires: osc >= 0.159.0
+
+%description rabbit-openqa
+Bot listening to AMQP bus and syncs openQA job status into OBS for
+staging projects
+
%prep
%setup -q
@@ -388,6 +398,14 @@
%postun pkglistgen
%systemd_postun
+%pre rabbit-openqa
+getent passwd osrt-rabit-openqa > /dev/null || \
+ useradd -r -m -s /sbin/nologin -c "user for openSUSE-release-tools-rabbit-openqa" osrt-rabit-openqa
+exit 0
+
+%postun rabbit-openqa
+%systemd_postun
+
%files
%defattr(-,root,root,-)
%doc README.md
@@ -436,6 +454,7 @@
%exclude %{_datadir}/%{source_dir}/osc-staging.py
%exclude %{_datadir}/%{source_dir}/osc-vdelreq.py
%exclude %{_datadir}/%{source_dir}/update_crawler.py
+%exclude %{_datadir}/%{source_dir}/rabbit-openqa.py
%dir %{_sysconfdir}/openSUSE-release-tools
%files devel
@@ -571,6 +590,12 @@
%{_unitdir}/[email protected]
%{_unitdir}/[email protected]
+%files rabbit-openqa
+%defattr(-,root,root,-)
+%{_bindir}/osrt-rabbit-openqa
+%{_datadir}/%{source_dir}/rabbit-openqa.py
+%{_unitdir}/osrt-rabbit-openqa.service
+
%files -n osclib
%defattr(-,root,root,-)
%{_datadir}/%{source_dir}/osclib
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.PMG8hb/_old 2018-10-23 20:42:27.008396663 +0200
+++ /var/tmp/diff_new_pack.PMG8hb/_new 2018-10-23 20:42:27.008396663 +0200
@@ -1,6 +1,6 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/openSUSE/openSUSE-release-tools.git</param>
- <param name="changesrevision">3282c9ae5678301abb00c14fbb4021d52dd76c04</param>
+ <param name="changesrevision">9b1618e7c51660bb6870efcbdf686cb8645abcbb</param>
</service>
</servicedata>
++++++ openSUSE-release-tools-20181017.3282c9a.obscpio -> openSUSE-release-tools-20181023.9b1618e.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/.travis.yml new/openSUSE-release-tools-20181023.9b1618e/.travis.yml
--- old/openSUSE-release-tools-20181017.3282c9a/.travis.yml 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/.travis.yml 2018-10-23 06:38:17.000000000 +0200
@@ -59,7 +59,7 @@
install:
# urlgrabber needed to install osc from git in requirements.txt
# m2crypto for osc to be runable as used in docker-compose-obs
- - pip install pycurl urlgrabber m2crypto
+ - pip install pycurl urlgrabber m2crypto pika
- pip install -r requirements.txt
- pip install python-coveralls
- pip install nose-exclude
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/PubSubConsumer.py new/openSUSE-release-tools-20181023.9b1618e/PubSubConsumer.py
--- old/openSUSE-release-tools-20181017.3282c9a/PubSubConsumer.py 1970-01-01 01:00:00.000000000 +0100
+++ new/openSUSE-release-tools-20181023.9b1618e/PubSubConsumer.py 2018-10-23 06:38:17.000000000 +0200
@@ -0,0 +1,333 @@
+import logging
+import pika
+
+
+class PubSubConsumer(object):
+ """This is an example consumer that will handle unexpected interactions
+ with RabbitMQ such as channel and connection closures.
+
+ If RabbitMQ closes the connection, it will reopen it. You should
+ look at the output, as there are limited reasons why the connection may
+ be closed, which usually are tied to permission related issues or
+ socket timeouts.
+
+ If the channel is closed, it will indicate a problem with one of the
+ commands that were issued and that should surface in the output as well.
+
+ """
+
+ def __init__(self, amqp_url, logger):
+ """Create a new instance of the consumer class, passing in the AMQP
+ URL used to connect to RabbitMQ.
+
+ :param str amqp_url: The AMQP url to connect with
+
+ """
+ self._connection = None
+ self._channel = None
+ self._closing = False
+ self._consumer_tag = None
+ self._url = amqp_url
+ self.logger = logger
+
+ def connect(self):
+ """This method connects to RabbitMQ, returning the connection handle.
+ When the connection is established, the on_connection_open method
+ will be invoked by pika.
+
+ :rtype: pika.SelectConnection
+
+ """
+ self.logger.info('Connecting to %s', self._url)
+ return pika.SelectConnection(pika.URLParameters(self._url),
+ self.on_connection_open,
+ stop_ioloop_on_close=False)
+
+ def close_connection(self):
+ """This method closes the connection to RabbitMQ."""
+ self.logger.info('Closing connection')
+ self._connection.close()
+
+ def add_on_connection_close_callback(self):
+ """This method adds an on close callback that will be invoked by pika
+ when RabbitMQ closes the connection to the publisher unexpectedly.
+
+ """
+ self.logger.debug('Adding connection close callback')
+ self._connection.add_on_close_callback(self.on_connection_closed)
+
+ def on_connection_closed(self, connection, reply_code, reply_text):
+ """This method is invoked by pika when the connection to RabbitMQ is
+ closed unexpectedly. Since it is unexpected, we will reconnect to
+ RabbitMQ if it disconnects.
+
+ :param pika.connection.Connection connection: The closed connection obj
+ :param int reply_code: The server provided reply_code if given
+ :param str reply_text: The server provided reply_text if given
+
+ """
+ self._channel = None
+ if self._closing:
+ self._connection.ioloop.stop()
+ else:
+ self.logger.warning('Connection closed, reopening in 5 seconds: (%s) %s',
+ reply_code, reply_text)
+ self._connection.add_timeout(5, self.reconnect)
+
+ def on_connection_open(self, unused_connection):
+ """This method is called by pika once the connection to RabbitMQ has
+ been established. It passes the handle to the connection object in
+ case we need it, but in this case, we'll just mark it unused.
+
+ :type unused_connection: pika.SelectConnection
+
+ """
+ self.logger.info('Connection opened')
+ self.add_on_connection_close_callback()
+ self.open_channel()
+
+ def reconnect(self):
+ """Will be invoked by the IOLoop timer if the connection is
+ closed. See the on_connection_closed method.
+
+ """
+ # This is the old connection IOLoop instance, stop its ioloop
+ self._connection.ioloop.stop()
+
+ if not self._closing:
+
+ # Create a new connection
+ self._connection = self.connect()
+
+ # There is now a new connection, needs a new ioloop to run
+ self._connection.ioloop.start()
+
+ def add_on_channel_close_callback(self):
+ """This method tells pika to call the on_channel_closed method if
+ RabbitMQ unexpectedly closes the channel.
+
+ """
+ self.logger.debug('Adding channel close callback')
+ self._channel.add_on_close_callback(self.on_channel_closed)
+
+ def on_channel_closed(self, channel, reply_code, reply_text):
+ """Invoked by pika when RabbitMQ unexpectedly closes the channel.
+ Channels are usually closed if you attempt to do something that
+ violates the protocol, such as re-declare an exchange or queue with
+ different parameters. In this case, we'll close the connection
+ to shutdown the object.
+
+ :param pika.channel.Channel: The closed channel
+ :param int reply_code: The numeric reason the channel was closed
+ :param str reply_text: The text reason the channel was closed
+
+ """
+ self.logger.warning('Channel %i was closed: (%s) %s',
+ channel, reply_code, reply_text)
+ self._connection.close()
+
+ def on_channel_open(self, channel):
+ """This method is invoked by pika when the channel has been opened.
+ The channel object is passed in so we can make use of it.
+
+ Since the channel is now open, we'll declare the exchange to use.
+
+ :param pika.channel.Channel channel: The channel object
+
+ """
+ self.logger.debug('Channel opened')
+ self._channel = channel
+ self.add_on_channel_close_callback()
+ self.setup_exchange('pubsub')
+
+ def setup_exchange(self, exchange_name):
+ """Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC
+ command. When it is complete, the on_exchange_declareok method will
+ be invoked by pika.
+
+ :param str|unicode exchange_name: The name of the exchange to declare
+
+ """
+ self.logger.debug('Declaring exchange %s', exchange_name)
+ self._channel.exchange_declare(self.on_exchange_declareok,
+ exchange=exchange_name,
+ exchange_type='topic',
+ passive=True, durable=True)
+
+ def on_exchange_declareok(self, unused_frame):
+ """Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC
+ command.
+
+ :param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame
+
+ """
+ self.logger.debug('Exchange declared')
+ self._channel.queue_declare(self.on_queue_declareok, exclusive=True)
+
+ def on_queue_declareok(self, method_frame):
+ """Method invoked by pika when the Queue.Declare RPC call made in
+ setup_queue has completed. In this method we will bind the queue
+ and exchange together with the routing key by issuing the Queue.Bind
+ RPC command. When this command is complete, the on_bindok method will
+ be invoked by pika.
+
+ :param pika.frame.Method method_frame: The Queue.DeclareOk frame
+
+ """
+ self.queue_name = method_frame.method.queue
+ self.routing_keys_to_bind = self.routing_keys()
+ self.bind_queue_to_routing_key(self.routing_keys_to_bind.pop())
+
+ def routing_keys(self):
+ return ['#']
+
+ def bind_queue_to_routing_key(self, key):
+ self.logger.info('Binding %s to %s', key, self.queue_name)
+ self._channel.queue_bind(self.on_bindok, self.queue_name, 'pubsub', key)
+
+ def add_on_cancel_callback(self):
+ """Add a callback that will be invoked if RabbitMQ cancels the consumer
+ for some reason. If RabbitMQ does cancel the consumer,
+ on_consumer_cancelled will be invoked by pika.
+
+ """
+ self.logger.debug('Adding consumer cancellation callback')
+ self._channel.add_on_cancel_callback(self.on_consumer_cancelled)
+
+ def on_consumer_cancelled(self, method_frame):
+ """Invoked by pika when RabbitMQ sends a Basic.Cancel for a consumer
+ receiving messages.
+
+ :param pika.frame.Method method_frame: The Basic.Cancel frame
+
+ """
+ self.logger.info('Consumer was cancelled remotely, shutting down: %r',
+ method_frame)
+ if self._channel:
+ self._channel.close()
+
+ def on_message(self, unused_channel, basic_deliver, properties, body):
+ """Invoked by pika when a message is delivered from RabbitMQ. The
+ channel is passed for your convenience. The basic_deliver object that
+ is passed in carries the exchange, routing key, delivery tag and
+ a redelivered flag for the message. The properties passed in is an
+ instance of BasicProperties with the message properties and the body
+ is the message that was sent.
+
+ :param pika.channel.Channel unused_channel: The channel object
+ :param pika.Spec.Basic.Deliver: basic_deliver method
+ :param pika.Spec.BasicProperties: properties
+ :param str|unicode body: The message body
+
+ """
+ self.logger.info('Received message # %s: %s %s',
+ basic_deliver.delivery_tag, basic_deliver.routing_key, body)
+
+ def on_cancelok(self, unused_frame):
+ """This method is invoked by pika when RabbitMQ acknowledges the
+ cancellation of a consumer. At this point we will close the channel.
+ This will invoke the on_channel_closed method once the channel has been
+ closed, which will in-turn close the connection.
+
+ :param pika.frame.Method unused_frame: The Basic.CancelOk frame
+
+ """
+ self.logger.debug('RabbitMQ acknowledged the cancellation of the consumer')
+ self.close_channel()
+
+ def stop_consuming(self):
+ """Tell RabbitMQ that you would like to stop consuming by sending the
+ Basic.Cancel RPC command.
+
+ """
+ if self._channel:
+ self.logger.debug('Sending a Basic.Cancel RPC command to RabbitMQ')
+ self._channel.basic_cancel(self.on_cancelok, self._consumer_tag)
+
+ def start_consuming(self):
+ """This method sets up the consumer by first calling
+ add_on_cancel_callback so that the object is notified if RabbitMQ
+ cancels the consumer. It then issues the Basic.Consume RPC command
+ which returns the consumer tag that is used to uniquely identify the
+ consumer with RabbitMQ. We keep the value to use it when we want to
+ cancel consuming. The on_message method is passed in as a callback pika
+ will invoke when a message is fully received.
+
+ """
+ self.logger.debug('Issuing consumer related RPC commands')
+ self.add_on_cancel_callback()
+ self._consumer_tag = self._channel.basic_consume(self.on_message,
+ self.queue_name, no_ack=True)
+
+ def on_bindok(self, unused_frame):
+ """Invoked by pika when the Queue.Bind method has completed. At this
+ point we will start consuming messages by calling start_consuming
+ which will invoke the needed RPC commands to start the process.
+
+ :param pika.frame.Method unused_frame: The Queue.BindOk response frame
+
+ """
+ self.logger.debug('Queue bound')
+ if len(self.routing_keys_to_bind):
+ self.bind_queue_to_routing_key(self.routing_keys_to_bind.pop())
+ else:
+ self.start_consuming()
+
+ def close_channel(self):
+ """Call to close the channel with RabbitMQ cleanly by issuing the
+ Channel.Close RPC command.
+
+ """
+ self.logger.debug('Closing the channel')
+ self._channel.close()
+
+ def open_channel(self):
+ """Open a new channel with RabbitMQ by issuing the Channel.Open RPC
+ command. When RabbitMQ responds that the channel is open, the
+ on_channel_open callback will be invoked by pika.
+
+ """
+ self.logger.debug('Creating a new channel')
+ self._connection.channel(on_open_callback=self.on_channel_open)
+
+ def run(self):
+ """Run the example consumer by connecting to RabbitMQ and then
+ starting the IOLoop to block and allow the SelectConnection to operate.
+
+ """
+ self._connection = self.connect()
+ self._connection.ioloop.start()
+
+ def stop(self):
+ """Cleanly shutdown the connection to RabbitMQ by stopping the consumer
+ with RabbitMQ. When RabbitMQ confirms the cancellation, on_cancelok
+ will be invoked by pika, which will then closing the channel and
+ connection. The IOLoop is started again because this method is invoked
+ when CTRL-C is pressed raising a KeyboardInterrupt exception. This
+ exception stops the IOLoop which needs to be running for pika to
+ communicate with RabbitMQ. All of the commands issued prior to starting
+ the IOLoop will be buffered but not processed.
+
+ """
+ self.logger.debug('Stopping')
+ self._closing = True
+ self.stop_consuming()
+ self._connection.ioloop.start()
+ self.logger.debug('Stopped')
+
+
+def main():
+ LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) '
+ '-35s %(lineno) -5d: %(message)s')
+
+ logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
+ example = PubSubConsumer('amqps://opensuse:[email protected]',
+ logging.getLogger(__name__))
+ try:
+ example.run()
+ except KeyboardInterrupt:
+ example.stop()
+
+
+if __name__ == '__main__':
+ main()
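The class above is meant to be subclassed rather than run directly: routing_keys() narrows the bindings and on_message() handles deliveries, which is exactly what the Listener in rabbit-openqa.py further down does. A minimal sketch of such a subclass; EchoConsumer is a made-up name and the routing key is only an example:

    import logging
    from PubSubConsumer import PubSubConsumer

    class EchoConsumer(PubSubConsumer):
        def routing_keys(self):
            # bind only to repo publish events instead of the default '#'
            return ['opensuse.obs.repo.published']

        def on_message(self, unused_channel, basic_deliver, properties, body):
            self.logger.info('%s: %s', basic_deliver.routing_key, body)

    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO)
        consumer = EchoConsumer('amqps://opensuse:[email protected]',
                                logging.getLogger(__name__))
        try:
            consumer.run()
        except KeyboardInterrupt:
            consumer.stop()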
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/ReviewBot.py new/openSUSE-release-tools-20181023.9b1618e/ReviewBot.py
--- old/openSUSE-release-tools-20181017.3282c9a/ReviewBot.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/ReviewBot.py 2018-10-23 06:38:17.000000000 +0200
@@ -124,6 +124,12 @@
self.config = self._load_config()
def staging_api(self, project):
+ # Allow for the Staging subproject to be passed directly from config
+ # which should be stripped before initializing StagingAPI. This allows
+ # for NonFree subproject to utilize StagingAPI for main project.
+ if project.endswith(':Staging'):
+ project = project[:-8]
+
if project not in self.staging_apis:
Config.get(self.apiurl, project)
self.staging_apis[project] = StagingAPI(self.apiurl, project)
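In other words, a configured staging value such as openSUSE:Factory:Staging is collapsed to its main project before the StagingAPI is created, so a NonFree subproject can reuse the Factory staging setup. A standalone illustration of the stripping (project names here are just examples):

    def strip_staging_suffix(project):
        # mirrors the check added to staging_api() above
        if project.endswith(':Staging'):
            project = project[:-len(':Staging')]
        return project

    print(strip_staging_suffix('openSUSE:Factory:Staging'))  # openSUSE:Factory
    print(strip_staging_suffix('openSUSE:Factory'))          # unchanged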
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/dist/package/openSUSE-release-tools.spec new/openSUSE-release-tools-20181023.9b1618e/dist/package/openSUSE-release-tools.spec
--- old/openSUSE-release-tools-20181017.3282c9a/dist/package/openSUSE-release-tools.spec 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/dist/package/openSUSE-release-tools.spec 2018-10-23 06:38:17.000000000 +0200
@@ -293,6 +293,16 @@
%description -n osc-plugin-vdelreq
OSC plugin to check for virtually accepted request, see `osc vdelreq --help`.
+%package rabbit-openqa
+Summary: Sync openQA Status Into OBS
+Group: Development/Tools/Other
+BuildArch: noarch
+Requires: osc >= 0.159.0
+
+%description rabbit-openqa
+Bot listening to AMQP bus and syncs openQA job status into OBS for
+staging projects
+
%prep
%setup -q
@@ -388,6 +398,14 @@
%postun pkglistgen
%systemd_postun
+%pre rabbit-openqa
+getent passwd osrt-rabit-openqa > /dev/null || \
+ useradd -r -m -s /sbin/nologin -c "user for openSUSE-release-tools-rabbit-openqa" osrt-rabit-openqa
+exit 0
+
+%postun rabbit-openqa
+%systemd_postun
+
%files
%defattr(-,root,root,-)
%doc README.md
@@ -436,6 +454,7 @@
%exclude %{_datadir}/%{source_dir}/osc-staging.py
%exclude %{_datadir}/%{source_dir}/osc-vdelreq.py
%exclude %{_datadir}/%{source_dir}/update_crawler.py
+%exclude %{_datadir}/%{source_dir}/rabbit-openqa.py
%dir %{_sysconfdir}/openSUSE-release-tools
%files devel
@@ -571,6 +590,12 @@
%{_unitdir}/[email protected]
%{_unitdir}/[email protected]
+%files rabbit-openqa
+%defattr(-,root,root,-)
+%{_bindir}/osrt-rabbit-openqa
+%{_datadir}/%{source_dir}/rabbit-openqa.py
+%{_unitdir}/osrt-rabbit-openqa.service
+
%files -n osclib
%defattr(-,root,root,-)
%{_datadir}/%{source_dir}/osclib
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/metrics/grafana/openqa.json new/openSUSE-release-tools-20181023.9b1618e/metrics/grafana/openqa.json
--- old/openSUSE-release-tools-20181017.3282c9a/metrics/grafana/openqa.json 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/metrics/grafana/openqa.json 2018-10-23 06:38:17.000000000 +0200
@@ -62,6 +62,7 @@
"y": 0
},
"id": 2,
+ "interval": "10s",
"legend": {
"avg": false,
"current": false,
@@ -253,6 +254,7 @@
"y": 0
},
"id": 6,
+ "interval": "10s",
"legend": {
"alignAsTable": false,
"avg": false,
@@ -379,6 +381,7 @@
"y": 9
},
"id": 4,
+ "interval": "10s",
"legend": {
"avg": false,
"current": false,
@@ -502,6 +505,7 @@
"y": 9
},
"id": 8,
+ "interval": "10s",
"legend": {
"avg": false,
"current": false,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/osc-staging.py new/openSUSE-release-tools-20181023.9b1618e/osc-staging.py
--- old/openSUSE-release-tools-20181017.3282c9a/osc-staging.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/osc-staging.py 2018-10-23 06:38:17.000000000 +0200
@@ -88,8 +88,8 @@
help='split the requests into individual groups')
@cmdln.option('--supersede', action='store_true',
help='replace staged requests when superseded')
[email protected]('-f', '--from', dest='from_', metavar='FROMPROJECT',
- help='specify a source project when moving a request')
[email protected]('--filter-from', metavar='STAGING',
+ help='filter request list to only those from a specific staging')
@cmdln.option('-p', '--project', dest='project', metavar='PROJECT',
help='indicate the project on which to operate, default is openSUSE:Factory')
@cmdln.option('--add', dest='add', metavar='PACKAGE',
@@ -285,6 +285,25 @@
These concepts can be combined and interactive mode allows the proposal
to be modified before it is executed.
+ Moving requests can be accomplished using the --move flag. For example,
+ to move already staged pac1 and pac2 to staging B use the following.
+
+ select --move B pac1 pac2
+
+ The staging in which the requests are staged will automatically be
+ determined and the requests will be removed from that staging and placed
+ in the specified staging.
+
+ Related to this, the --filter-from option may be used in conjunction
+ with --move to only move requests already staged in a specific staging.
+ This can be useful if a staging master is responsible for a specific set
+ of packages and wants to move them into a different staging when they
+ were already placed in a mixed staging. For example, if one had a file
+ with a list of packages the following would move any of them found in
+ staging A to staging B.
+
+ select --move --filter-from A B $(< package.list)
+
"unselect" will remove from the project - pushing them back to the backlog
If a message is included the requests will be ignored first.
@@ -324,7 +343,7 @@
osc staging unignore [--cleanup] [REQUEST...|all]
osc staging list [--supersede]
osc staging lock [-m MESSAGE]
- osc staging select [--no-freeze] [--move [--from STAGING]]
+ osc staging select [--no-freeze] [--move [--filter-from STAGING]]
[--add PACKAGE]
STAGING REQUEST...
osc staging select [--no-freeze] [--interactive|--non-interactive]
@@ -524,8 +543,8 @@
requests.append(arg)
if len(stagings) != 1 or len(requests) == 0 or opts.filter_by or opts.group_by:
- if opts.move or opts.from_:
- print('--move and --from must be used with explicit staging and request list')
+ if opts.move or opts.filter_from:
+ print('--move and --filter-from must be used with explicit staging and request list')
return
open_requests = api.get_open_requests({'withhistory': 1})
@@ -638,7 +657,8 @@
api.mark_additional_packages(target_project, [opts.add])
else:
SelectCommand(api, target_project) \
- .perform(requests, opts.move, opts.from_, opts.no_freeze)
+ .perform(requests, opts.move,
+ api.prj_from_short(opts.filter_from), opts.no_freeze)
elif cmd == 'cleanup_rings':
CleanupRings(api).perform()
elif cmd == 'ignore':
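For readers unfamiliar with the short staging notation used by select: the letters passed to --move/--filter-from are expanded to full staging project names (prj_from_short() and prj_from_letter() in osclib do this) before SelectCommand compares them. The helper below only illustrates the naming convention and is not the real implementation:

    def staging_project(base_project, letter):
        # 'B' -> 'openSUSE:Factory:Staging:B' when base_project is openSUSE:Factory
        return '{}:Staging:{}'.format(base_project, letter)

    print(staging_project('openSUSE:Factory', 'B'))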
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/osclib/conf.py new/openSUSE-release-tools-20181023.9b1618e/osclib/conf.py
--- old/openSUSE-release-tools-20181017.3282c9a/osclib/conf.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/osclib/conf.py 2018-10-23 06:38:17.000000000 +0200
@@ -19,7 +19,7 @@
# the project.
DEFAULT = {
- r'openSUSE:(?P<project>Factory(?::Ports)?)$': {
+ r'openSUSE:(?P<project>Factory)(?::NonFree)?$': {
'staging': 'openSUSE:%(project)s:Staging',
'staging-group': 'factory-staging',
'staging-archs': 'i586 x86_64',
@@ -50,7 +50,7 @@
'mail-noreply': '[email protected]',
'mail-release-list': '[email protected]',
},
- r'openSUSE:(?P<project>Leap:(?P<version>[\d.]+)(?::Ports)?)$': {
+ r'openSUSE:(?P<project>Leap:(?P<version>[\d.]+))(?::NonFree)?$': {
'staging': 'openSUSE:%(project)s:Staging',
'staging-group': 'factory-staging',
'staging-archs': 'i586 x86_64',
@@ -99,7 +99,7 @@
'mail-noreply': '[email protected]',
'mail-release-list': '[email protected]',
},
- r'openSUSE:(?P<project>Leap:(?P<version>[\d.]+):Update)$': {
+ r'openSUSE:(?P<project>Leap:(?P<version>[\d.]+)(?::NonFree)?:Update)$': {
'main-repo': 'standard',
'leaper-override-group': 'leap-reviewers',
'repo_checker-arch-whitelist': 'x86_64',
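A quick standalone check of what the NonFree change to the Factory pattern above does (not part of the package, just an illustration):

    import re

    pattern = re.compile(r'openSUSE:(?P<project>Factory)(?::NonFree)?$')
    for name in ('openSUSE:Factory', 'openSUSE:Factory:NonFree'):
        settings = pattern.match(name).groupdict()
        # both names resolve to the same staging project
        print(name, '->', 'openSUSE:%(project)s:Staging' % settings)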
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/osclib/cycle.py new/openSUSE-release-tools-20181023.9b1618e/osclib/cycle.py
--- old/openSUSE-release-tools-20181017.3282c9a/osclib/cycle.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/osclib/cycle.py 2018-10-23 06:38:17.000000000 +0200
@@ -129,8 +129,8 @@
class CycleDetector(object):
"""Class to detect cycles in an OBS project."""
- def __init__(self, api):
- self.api = api
+ def __init__(self, apiurl):
+ self.apiurl = apiurl
# Store packages prevoiusly ignored. Don't pollute the screen.
self._ignore_packages = set()
@@ -138,7 +138,7 @@
root = None
try:
# print('Generating _builddepinfo for (%s, %s, %s)' % (project, repository, arch))
- url = makeurl(self.api.apiurl, ['build/%s/%s/%s/_builddepinfo' % (project, repository, arch)])
+ url = makeurl(self.apiurl, ['build/%s/%s/%s/_builddepinfo' % (project, repository, arch)])
root = http_GET(url).read()
except urllib2.HTTPError as e:
print('ERROR in URL %s [%s]' % (url, e))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/osclib/freeze_command.py new/openSUSE-release-tools-20181023.9b1618e/osclib/freeze_command.py
--- old/openSUSE-release-tools-20181017.3282c9a/osclib/freeze_command.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/osclib/freeze_command.py 2018-10-23 06:38:17.000000000 +0200
@@ -177,6 +177,7 @@
ET.SubElement(f, 'disable')
f = ET.SubElement(root, 'publish')
ET.SubElement(f, 'disable')
+ ET.SubElement(f, 'enable', {'repository': 'images'})
f = ET.SubElement(root, 'debuginfo')
ET.SubElement(f, 'enable')
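The freeze_command.py hunk keeps publishing disabled for the frozen project as a whole while re-enabling it for the images repository, matching the updated test fixture at the end of this diff. A standalone sketch of the meta fragment being built (using the standard-library ElementTree rather than the module's own imports):

    import xml.etree.ElementTree as ET

    root = ET.Element('project')
    publish = ET.SubElement(root, 'publish')
    ET.SubElement(publish, 'disable')
    ET.SubElement(publish, 'enable', {'repository': 'images'})
    # prints something like:
    # <project><publish><disable /><enable repository="images" /></publish></project>
    print(ET.tostring(root).decode())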
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/osclib/select_command.py new/openSUSE-release-tools-20181023.9b1618e/osclib/select_command.py
--- old/openSUSE-release-tools-20181017.3282c9a/osclib/select_command.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/osclib/select_command.py 2018-10-23 06:38:17.000000000 +0200
@@ -59,7 +59,7 @@
return candidates[0] if candidates else None
- def select_request(self, request, move, from_):
+ def select_request(self, request, move, filter_from):
supersede = self._supersede(request)
staged_requests = {
@@ -73,12 +73,11 @@
return self.api.rq_to_prj(request, self.target_project)
elif request in staged_requests and (move or supersede):
# 'select' command becomes a 'move'
- fprj = None
- if from_:
- fprj = self.api.prj_from_letter(from_)
- else:
- # supersede = (new_rq, package, project)
- fprj = self.api.packages_staged[staged_requests[request]]['prj'] if not supersede else supersede[2]
+ # supersede = (new_rq, package, project)
+ fprj = self.api.packages_staged[staged_requests[request]]['prj'] if not supersede else supersede[2]
+ if filter_from != fprj:
+ print('Ignoring "{}" in "{}" since not in "{}"'.format(request, fprj, filter_from))
+ return True
if supersede:
print('"{} ({}) is superseded by {}'.format(request, supersede[1], supersede[0]))
@@ -109,13 +108,13 @@
raise oscerr.WrongArgs('Arguments for select are not correct.')
def perform(self, requests, move=False,
- from_=None, no_freeze=False):
+ filter_from=None, no_freeze=False):
"""
Select package and move it accordingly by arguments
:param target_project: project we want to target
:param requests: requests we are working with
:param move: wether to move the requests or not
- :param from_: location where from move the requests
+ :param filter_from: filter request list to only those from a specific staging
"""
if self.api.is_adi_project(self.target_project):
@@ -135,7 +134,7 @@
requests_count = len(requests)
for index, request in enumerate(requests, start=1):
print('({}/{}) '.format(index, requests_count), end='')
- if not self.select_request(request, move, from_):
+ if not self.select_request(request, move, filter_from):
return False
# Notify everybody about the changes
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/pkglistgen.py new/openSUSE-release-tools-20181023.9b1618e/pkglistgen.py
--- old/openSUSE-release-tools-20181017.3282c9a/pkglistgen.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/pkglistgen.py 2018-10-23 06:38:17.000000000 +0200
@@ -1327,7 +1327,7 @@
opts_nonfree = copy.deepcopy(opts)
opts_nonfree.project = nonfree
self.repos = self.tool.expand_repos(nonfree, main_repo)
- self.update_repos(opts_nonfree)
+ self.tool.update_repos(opts_nonfree)
# Switch repo back to main target project.
self.repos = repos_
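The pkglistgen.py change is the AttributeError fix mentioned in the changelog: update_repos() now lives on the PkgListGen tool object, so the command-line wrapper has to call it via self.tool. A toy reconstruction of the failure mode (the real classes are in pkglistgen.py; these are simplified stand-ins):

    class PkgListGen(object):
        def update_repos(self, opts):
            return 'updated'

    class CommandLineInterface(object):
        def __init__(self):
            self.tool = PkgListGen()

    cli = CommandLineInterface()
    print(cli.tool.update_repos(None))   # works
    # cli.update_repos(None)             # AttributeError after the method moved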
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/rabbit-openqa.py new/openSUSE-release-tools-20181023.9b1618e/rabbit-openqa.py
--- old/openSUSE-release-tools-20181017.3282c9a/rabbit-openqa.py 1970-01-01 01:00:00.000000000 +0100
+++ new/openSUSE-release-tools-20181023.9b1618e/rabbit-openqa.py 2018-10-23 06:38:17.000000000 +0200
@@ -0,0 +1,272 @@
+#!/usr/bin/python
+
+import argparse
+import logging
+import pika
+import sys
+import json
+import osc
+import re
+from time import sleep
+from osc.core import http_GET, http_POST, makeurl
+from M2Crypto.SSL import SSLError as SSLError
+from osclib.conf import Config
+from osclib.stagingapi import StagingAPI
+from lxml import etree as ET
+from openqa_client.client import OpenQA_Client
+from openqa_client.exceptions import ConnectionError
+from urllib import quote_plus
+import requests
+try:
+ from urllib.error import HTTPError, URLError
+except ImportError:
+ # python 2.x
+ from urllib2 import HTTPError, URLError
+from PubSubConsumer import PubSubConsumer
+
+
+class Project(object):
+ def __init__(self, name):
+ self.name = name
+ Config(apiurl, name)
+ self.api = StagingAPI(apiurl, name)
+ self.staging_projects = dict()
+ self.listener = None
+ self.replace_string = self.api.attribute_value_load('OpenQAMapping')
+
+ def init(self):
+ for p in self.api.get_staging_projects():
+ if self.api.is_adi_project(p):
+ continue
+ self.staging_projects[p] = self.initial_staging_state(p)
+ self.update_staging_status(p)
+
+ def staging_letter(self, name):
+ return name.split(':')[-1]
+
+ def map_iso(self, staging_project, iso):
+ parts = self.replace_string.split('/')
+ if parts[0] != 's':
+ raise Exception("{}'s iso_replace_string does not start with s/".format(self.name))
+ old = parts[1]
+ new = parts[2]
+ new = new.replace('$LETTER', self.staging_letter(staging_project))
+ return re.compile(old).sub(new, iso)
+
+ def gather_isos(self, name, repository):
+ url = self.api.makeurl(['published', name, repository, 'iso'])
+ f = self.api.retried_GET(url)
+ root = ET.parse(f).getroot()
+ ret = []
+ for entry in root.findall('entry'):
+ if entry.get('name').endswith('iso'):
+ ret.append(self.map_iso(name, entry.get('name')))
+ return ret
+
+ def gather_buildid(self, name, repository):
+ url = self.api.makeurl(['published', name, repository], {'view': 'status'})
+ f = self.api.retried_GET(url)
+ id = ET.parse(f).getroot().find('buildid')
+ if id is not None:
+ return id.text
+
+ def initial_staging_state(self, name):
+ return {'isos': self.gather_isos(name, 'images'),
+ 'id': self.gather_buildid(name, 'images')}
+
+ def fetch_openqa_jobs(self, staging, iso):
+ buildid = self.staging_projects[staging].get('id')
+ if not buildid:
+ self.logger.info("I don't know the build id of " + staging)
+ return
+ # all openQA jobs are created at the same URL
+ url = self.api.makeurl(['status_reports', 'published', staging, 'images', 'reports', buildid])
+ openqa = self.listener.jobs_for_iso(iso)
+ # collect job infos to pick names
+ openqa_infos = dict()
+ for job in openqa:
+ print(staging, iso, job['id'], job['state'], job['result'],
+ job['settings']['MACHINE'], job['settings']['TEST'])
+ openqa_infos[job['id']] = {'url': self.listener.test_url(job)}
+ openqa_infos[job['id']]['state'] = self.map_openqa_result(job)
+ openqa_infos[job['id']]['name'] = job['settings']['TEST']
+ openqa_infos[job['id']]['machine'] = job['settings']['MACHINE']
+
+ # make sure the names are unique
+ taken_names = dict()
+ for id in openqa_infos:
+ name = openqa_infos[id]['name']
+ if name in taken_names:
+ openqa_infos[id]['name'] = openqa_infos[id]['name'] + "@" + openqa_infos[id]['machine']
+ # the other id
+ id = taken_names[name]
+ openqa_infos[id]['name'] = openqa_infos[id]['name'] + "@" + openqa_infos[id]['machine']
+ taken_names[name] = id
+
+ for info in openqa_infos.values():
+ xml = self.openqa_check_xml(info['url'], info['state'], info['name'])
+ try:
+ http_POST(url, data=xml)
+ except HTTPError:
+ self.logger.error('failed to post status to ' + url)
+
+ def update_staging_status(self, project):
+ for iso in self.staging_projects[project]['isos']:
+ self.fetch_openqa_jobs(project, iso)
+
+ def update_staging_buildid(self, project, repository, buildid):
+ self.staging_projects[project]['id'] = buildid
+ self.staging_projects[project]['isos'] = self.gather_isos(project, repository)
+ self.update_staging_status(project)
+
+ def check_published_repo(self, project, repository, buildid):
+ if repository != 'images':
+ return
+ for p in self.staging_projects:
+ if project == p:
+ self.update_staging_buildid(project, repository, buildid)
+
+ def matching_project(self, iso):
+ for p in self.staging_projects:
+ if iso in self.staging_projects[p]['isos']:
+ return p
+
+ def map_openqa_result(self, job):
+ if job['result'] in ['passed', 'softfailed']:
+ return 'success'
+ if job['result'] == 'none':
+ return 'pending'
+ return 'failure'
+
+ def openqa_job_change(self, iso):
+ staging = self.matching_project(iso)
+ if not staging:
+ return
+ # we fetch all openqa jobs so we can avoid long job names
+ self.fetch_openqa_jobs(staging, iso)
+
+ def openqa_check_xml(self, url, state, name):
+ check = ET.Element('check')
+ se = ET.SubElement(check, 'url')
+ se.text = url
+ se = ET.SubElement(check, 'state')
+ se.text = state
+ se = ET.SubElement(check, 'name')
+ se.text = name
+ return ET.tostring(check)
+
+
+class Listener(PubSubConsumer):
+ def __init__(self, amqp_prefix, amqp_url, openqa_url):
+ super(Listener, self).__init__(amqp_url, logging.getLogger(__name__))
+ self.projects = []
+ self.amqp_prefix = amqp_prefix
+ self.openqa_url = openqa_url
+ self.openqa = OpenQA_Client(server=openqa_url)
+
+ def routing_keys(self):
+ ret = []
+ for suffix in ['.obs.repo.published', '.openqa.job.done',
+ '.openqa.job.create', '.openqa.job.restart']:
+ ret.append(self.amqp_prefix + suffix)
+ return ret
+
+ def add(self, project):
+ project.listener = self
+ self.projects.append(project)
+
+ def start_consuming(self):
+ # now we are (re-)connected to the bus and need to fetch the
+ # initial state
+ for project in self.projects:
+ self.logger.info('Fetching ISOs of %s', project.name)
+ project.init()
+ self.logger.info('Finished fetching initial ISOs, listening')
+ super(Listener, self).start_consuming()
+
+ def jobs_for_iso(self, iso):
+ values = {
+ 'iso': iso,
+ 'scope': 'current',
+ 'latest': '1',
+ }
+ return self.openqa.openqa_request('GET', 'jobs', values)['jobs']
+
+ def get_step_url(self, testurl, modulename):
+ failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
+ fails = requests.get(failurl).json()
+ failed_step = fails.get('first_failed_step', 1)
+ return "{!s}#step/{!s}/{:d}".format(testurl, modulename, failed_step)
+
+ def test_url(self, job):
+ url = self.openqa_url + ("/tests/%d" % job['id'])
+ if job['result'] == 'failed':
+ for module in job['modules']:
+ if module['result'] == 'failed':
+ return self.get_step_url(url, module['name'])
+ return url
+
+ def on_published_repo(self, payload):
+ for p in self.projects:
+ p.check_published_repo(str(payload['project']), str(payload['repo']), str(payload['buildid']))
+
+ def on_openqa_job(self, iso):
+ self.logger.debug('openqa_job_change', iso)
+ for p in self.projects:
+ p.openqa_job_change(iso)
+
+ def on_message(self, unused_channel, method, properties, body):
+ if method.routing_key == '{}.obs.repo.published'.format(amqp_prefix):
+ self.on_published_repo(json.loads(body))
+ elif re.search(r'.openqa.', method.routing_key):
+ self.on_openqa_job(json.loads(body).get('ISO'))
+ else:
+ self.logger.warning("unknown rabbitmq message {}".format(method.routing_key))
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='Bot to sync openQA status to OBS')
+ parser.add_argument("--apiurl", '-A', type=str, default='https://api.opensuse.org', help='API URL of OBS')
+ parser.add_argument('-s', '--staging', type=str, default=None,
+ help='staging project letter')
+ parser.add_argument('-f', '--force', action='store_true', default=False,
+ help='force the write of the comment')
+ parser.add_argument('-p', '--project', type=str, default='Factory',
+ help='openSUSE version to make the check (Factory, 13.2)')
+ parser.add_argument('-d', '--debug', action='store_true', default=False,
+ help='enable debug information')
+
+ args = parser.parse_args()
+
+ osc.conf.get_config()
+ osc.conf.config['debug'] = args.debug
+
+ apiurl = args.apiurl
+
+ if apiurl.endswith('suse.de'):
+ amqp_prefix = 'suse'
+ amqp_url = "amqps://suse:[email protected]"
+ openqa_url = 'https://openqa.suse.de'
+ else:
+ amqp_prefix = 'opensuse'
+ amqp_url = "amqps://opensuse:[email protected]"
+ openqa_url = 'https://openqa.opensuse.org'
+
+ logging.basicConfig(level=logging.INFO)
+
+ l = Listener(amqp_prefix, amqp_url, openqa_url)
+ url = makeurl(apiurl, ['search', 'project', 'id'], {'match': 'attribute/@name="OSRT:OpenQAMapping"'})
+ f = http_GET(url)
+ root = ET.parse(f).getroot()
+ for entry in root.findall('project'):
+ l.add(Project(entry.get('name')))
+
+ while True:
+ try:
+ l.run()
+ except KeyboardInterrupt:
+ l.stop()
+ except (HTTPError, URLError, ConnectionError, SSLError):
+ # OBS/openQA hickup
+ sleep(10)
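The OSRT:OpenQAMapping attribute consumed by Project.map_iso() above is a sed-style s/pattern/replacement/ string in which $LETTER is replaced by the staging letter; the actual attribute value is project configuration and not part of this commit. A standalone sketch of the mechanics with a purely hypothetical mapping and ISO name:

    import re

    def map_iso(replace_string, staging_letter, iso):
        parts = replace_string.split('/')   # ['s', pattern, replacement, ...]
        if parts[0] != 's':
            raise ValueError('mapping must start with s/')
        pattern, replacement = parts[1], parts[2]
        replacement = replacement.replace('$LETTER', staging_letter)
        return re.compile(pattern).sub(replacement, iso)

    mapping = 's/-Media1-/-Staging:$LETTER-Media1-/'   # hypothetical mapping
    print(map_iso(mapping, 'B', 'Test-DVD-x86_64-Media1-Build42.1.iso'))
    # -> Test-DVD-x86_64-Staging:B-Media1-Build42.1.iso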
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/repo_checker.py new/openSUSE-release-tools-20181023.9b1618e/repo_checker.py
--- old/openSUSE-release-tools-20181017.3282c9a/repo_checker.py 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/repo_checker.py 2018-10-23 06:38:17.000000000 +0200
@@ -169,8 +169,9 @@
def binary_whitelist(self, override_pair, overridden_pair, arch):
whitelist = self.binary_list_existing_problem(overridden_pair[0], overridden_pair[1])
- if Config.get(self.apiurl, overridden_pair[0]).get('staging'):
- additions = self.staging_api(overridden_pair[0]).get_prj_pseudometa(
+ staging = Config.get(self.apiurl, overridden_pair[0]).get('staging')
+ if staging:
+ additions = self.staging_api(staging).get_prj_pseudometa(
override_pair[0]).get('config', {})
prefix = 'repo_checker-binary-whitelist'
for key in [prefix, '-'.join([prefix, arch])]:
@@ -297,7 +298,7 @@
self.logger.info('cycle check: start')
comment = []
first = True
- cycle_detector = CycleDetector(self.staging_api(overridden_pair[0]))
+ cycle_detector = CycleDetector(self.apiurl)
for index, (cycle, new_edges, new_packages) in enumerate(
cycle_detector.cycles(override_pair, overridden_pair, arch),
start=1):
@@ -525,8 +526,9 @@
repository_pairs = []
# Assumes maintenance_release target project has staging disabled.
- if Config.get(self.apiurl, action.tgt_project).get('staging'):
- api = self.staging_api(action.tgt_project)
+ staging = Config.get(self.apiurl, action.tgt_project).get('staging')
+ if staging:
+ api = self.staging_api(staging)
stage_info = api.packages_staged.get(action.tgt_package)
if not stage_info or str(stage_info['rq_id']) != str(request.reqid):
self.logger.info('{} not staged'.format(request.reqid))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/systemd/osrt-rabbit-openqa.service new/openSUSE-release-tools-20181023.9b1618e/systemd/osrt-rabbit-openqa.service
--- old/openSUSE-release-tools-20181017.3282c9a/systemd/osrt-rabbit-openqa.service 1970-01-01 01:00:00.000000000 +0100
+++ new/openSUSE-release-tools-20181023.9b1618e/systemd/osrt-rabbit-openqa.service 2018-10-23 06:38:17.000000000 +0200
@@ -0,0 +1,9 @@
+[Unit]
+Description=openSUSE Release Tools: Sync openQA status
+
+[Service]
+User=osrt-rabbit-openqa
+ExecStart=/usr/bin/osrt-rabbit-openqa
+
+[Install]
+WantedBy=multi-user.target
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181017.3282c9a/tests/fixtures/staging-meta-for-bootstrap-copy.xml new/openSUSE-release-tools-20181023.9b1618e/tests/fixtures/staging-meta-for-bootstrap-copy.xml
--- old/openSUSE-release-tools-20181017.3282c9a/tests/fixtures/staging-meta-for-bootstrap-copy.xml 2018-10-17 23:12:55.000000000 +0200
+++ new/openSUSE-release-tools-20181023.9b1618e/tests/fixtures/staging-meta-for-bootstrap-copy.xml 2018-10-23 06:38:17.000000000 +0200
@@ -8,6 +8,7 @@
</build>
<publish>
<disable/>
+ <enable repository="images"/>
</publish>
<debuginfo>
<enable/>
++++++ openSUSE-release-tools.obsinfo ++++++
--- /var/tmp/diff_new_pack.PMG8hb/_old 2018-10-23 20:42:27.776395745 +0200
+++ /var/tmp/diff_new_pack.PMG8hb/_new 2018-10-23 20:42:27.776395745 +0200
@@ -1,5 +1,5 @@
name: openSUSE-release-tools
-version: 20181017.3282c9a
-mtime: 1539810775
-commit: 3282c9ae5678301abb00c14fbb4021d52dd76c04
+version: 20181023.9b1618e
+mtime: 1540269497
+commit: 9b1618e7c51660bb6870efcbdf686cb8645abcbb