Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package openSUSE-release-tools for
openSUSE:Factory checked in at 2023-04-05 21:28:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/openSUSE-release-tools (Old)
and /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new.19717 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "openSUSE-release-tools"
Wed Apr 5 21:28:32 2023 rev:461 rq:1077461 version:20230329.af36a03
Changes:
--------
--- /work/SRC/openSUSE:Factory/openSUSE-release-tools/openSUSE-release-tools.changes 2023-03-22 22:32:29.382751053 +0100
+++ /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new.19717/openSUSE-release-tools.changes 2023-04-05 21:35:55.830662792 +0200
@@ -1,0 +2,23 @@
+Wed Mar 29 09:03:19 UTC 2023 - [email protected]
+
+- Update to version 20230329.af36a03:
+ * osclib: Skip debuginfo, debugsource and src rpms in fileinfo_ext_all
+ * Remove try/catch around code which must not fail
+ * Rewrite core algorithm of cleanup_rings.py
+ * cleanup_rings.py: Fix links handling
+
+-------------------------------------------------------------------
+Fri Mar 24 09:26:39 UTC 2023 - [email protected]
+
+- Update to version 20230324.4a241c4:
+ * Allow cleanup_rings without locking the stagings
+ * cleanup_rings.py: Expand whitelist
+ * cleanup_rings.py: Look at all enabled images
+
+-------------------------------------------------------------------
+Thu Mar 23 10:43:13 UTC 2023 - [email protected]
+
+- Update to version 20230323.0d10247:
+ * Handle deletion of packages with _multibuild (#2943)
+
+-------------------------------------------------------------------
Old:
----
openSUSE-release-tools-20230315.982c565.obscpio
New:
----
openSUSE-release-tools-20230329.af36a03.obscpio
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ openSUSE-release-tools.spec ++++++
--- /var/tmp/diff_new_pack.hufhhc/_old 2023-04-05 21:35:56.806668362 +0200
+++ /var/tmp/diff_new_pack.hufhhc/_new 2023-04-05 21:35:56.814668408 +0200
@@ -20,7 +20,7 @@
%define source_dir openSUSE-release-tools
%define announcer_filename factory-package-news
Name: openSUSE-release-tools
-Version: 20230315.982c565
+Version: 20230329.af36a03
Release: 0
Summary: Tools to aid in staging and release work for openSUSE/SUSE
License: GPL-2.0-or-later AND MIT
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.hufhhc/_old 2023-04-05 21:35:56.926669048 +0200
+++ /var/tmp/diff_new_pack.hufhhc/_new 2023-04-05 21:35:56.934669093 +0200
@@ -1,7 +1,7 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/openSUSE/openSUSE-release-tools.git</param>
- <param name="changesrevision">982c565e51ba25ee52e40369c98ee9b35607d837</param>
+ <param name="changesrevision">af36a030b3d2e34f24828caf849c463ffeb2db14</param>
</service>
</servicedata>
++++++ openSUSE-release-tools-20230315.982c565.obscpio -> openSUSE-release-tools-20230329.af36a03.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20230315.982c565/osc-staging.py new/openSUSE-release-tools-20230329.af36a03/osc-staging.py
--- old/openSUSE-release-tools-20230315.982c565/osc-staging.py 2023-03-15 12:37:09.000000000 +0100
+++ new/openSUSE-release-tools-20230329.af36a03/osc-staging.py 2023-03-29 11:02:25.000000000 +0200
@@ -68,7 +68,7 @@
def lock_needed(cmd, opts):
return not (
cmd in ('check', 'check_duplicate_binaries', 'check_local_links',
- 'frozenage', 'rebuild', 'unlock', 'setprio') or
+ 'frozenage', 'rebuild', 'unlock', 'setprio', 'cleanup_rings') or
(cmd == 'list' and not opts.supersede)
)
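
The osc-staging.py hunk above adds cleanup_rings to the staging subcommands that run without taking the staging lock (matching the "Allow cleanup_rings without locking the stagings" changelog entry). A minimal standalone sketch of the resulting predicate, with the command tuple pulled out into a constant and opts reduced to a plain supersede flag purely for illustration (not the shipped function signature):

# Sketch of the lock predicate after this change; READ_ONLY_COMMANDS and the
# keyword argument are restructurings for this example only.
READ_ONLY_COMMANDS = (
    'check', 'check_duplicate_binaries', 'check_local_links',
    'frozenage', 'rebuild', 'unlock', 'setprio', 'cleanup_rings',
)

def lock_needed(cmd: str, supersede: bool = False) -> bool:
    """Return True if the staging project lock must be taken for `cmd`."""
    return not (cmd in READ_ONLY_COMMANDS or (cmd == 'list' and not supersede))

assert lock_needed('select')                 # modifying command -> lock
assert not lock_needed('cleanup_rings')      # newly lock-free with this update
assert not lock_needed('list')               # plain list is read-only
assert lock_needed('list', supersede=True)   # list with supersede modifies requests
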
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20230315.982c565/osclib/cleanup_rings.py new/openSUSE-release-tools-20230329.af36a03/osclib/cleanup_rings.py
--- old/openSUSE-release-tools-20230315.982c565/osclib/cleanup_rings.py 2023-03-15 12:37:09.000000000 +0100
+++ new/openSUSE-release-tools-20230329.af36a03/osclib/cleanup_rings.py 2023-03-29 11:02:25.000000000 +0200
@@ -3,8 +3,7 @@
from osc.core import http_GET
from osclib.core import fileinfo_ext_all
from osclib.core import builddepinfo
-
-from urllib.error import HTTPError
+from osclib.memoize import memoize
class CleanupRings(object):
@@ -16,9 +15,20 @@
self.links = {}
self.commands = []
self.whitelist = [
- # Must remain in ring-1 with other kernel packages to keep matching
- # build number, but is required by virtualbox in ring-2.
- 'kernel-syms',
+ # Keep this in ring 1, even though ring 0 builds the main flavor
+ # and ring 1 has that disabled.
+ 'automake:testsuite',
+ 'meson:test',
+ # buildtime services aren't visible in _builddepinfo
+ 'obs-service-recompress',
+ 'obs-service-set_version',
+ 'obs-service-tar_scm',
+ # Used by ARM only, but part of oS:F ring 1 in general
+ 'u-boot',
+ 'raspberrypi-firmware-dt',
+ 'raspberrypi-firmware-config',
+ # Added manually to notice failures early
+ 'vagrant',
]
def perform(self):
@@ -49,7 +59,7 @@
if dprj != self.api.project:
if not dprj.startswith(self.api.crings):
print("#{} not linking to base {} but {}".format(pkg,
self.api.project, dprj))
- self.links[dpkg] = pkg
+ self.links[pkg] = dpkg
# multi spec package must link to ring
elif len(links) > 1:
mainpkg = links[1].get('package')
@@ -66,19 +76,13 @@
else:
if pkg != 'glibc.i686': # FIXME: ugly exception
print("osc linkpac -f {}/{}
{}/{}".format(destring, mainpkg, prj, pkg))
- self.links[mainpkg] = pkg
+ self.links[pkg] = mainpkg
- def fill_pkgdeps(self, prj, repo, arch):
+ def fill_pkginfo(self, prj, repo, arch):
root = builddepinfo(self.api.apiurl, prj, repo, arch)
for package in root.findall('package'):
- # use main package name for multibuild. We can't just ignore
- # multibuild as eg installation-images has no results for the main
- # package itself
- # https://github.com/openSUSE/open-build-service/issues/4198
- name = package.attrib['name'].split(':')[0]
- if name.startswith('preinstall'):
- continue
+ name = package.attrib['name']
self.sources.add(name)
@@ -88,19 +92,9 @@
if self.bin2src[subpkg] == name:
# different archs
continue
- print('# Binary {} is defined twice: {}/{}'.format(subpkg, prj, name))
+ print('# Binary {} is defined twice: {} {}+{}'.format(subpkg, prj, name, self.bin2src[subpkg]))
self.bin2src[subpkg] = name
- for package in root.findall('package'):
- name = package.attrib['name'].split(':')[0]
- for pkg in package.findall('pkgdep'):
- if pkg.text not in self.bin2src:
- if not pkg.text.startswith('texlive-'): # XXX: texlive bullshit packaging
- print('Package {} not found in place'.format(pkg.text))
- continue
- b = self.bin2src[pkg.text]
- self.pkgdeps[b] = name
-
def repo_state_acceptable(self, project):
url = makeurl(self.api.apiurl, ['build', project, '_result'])
root = ET.parse(http_GET(url)).getroot()
@@ -118,14 +112,14 @@
return True
def check_image_bdeps(self, project, arch):
- for dvd in ('000product:openSUSE-dvd5-dvd-{}'.format(arch), 'Test-DVD-{}'.format(arch)):
- try:
- url = makeurl(self.api.apiurl, ['build', project, 'images', arch, dvd, '_buildinfo'])
- root = ET.parse(http_GET(url)).getroot()
- except HTTPError as e:
- if e.code == 404:
- continue
- raise
+ url = makeurl(self.api.apiurl, ['build', project, '_result'])
+ root = ET.parse(http_GET(url)).getroot()
+ for image in root.xpath(f"result[@repository = 'images' and @arch = '{arch}']/status[@code != 'excluded' and @code != 'disabled']"):
+ dvd = image.get('package')
+ url = makeurl(self.api.apiurl, ['build', project, 'images', arch, dvd, '_buildinfo'])
+ root = ET.parse(http_GET(url)).getroot()
+ # Don't delete the image itself
+ self.pkgdeps[dvd.split(':')[0]] = 'MYdvd{}'.format(self.api.rings.index(project))
for bdep in root.findall('bdep'):
if 'name' not in bdep.attrib:
continue
@@ -135,7 +129,6 @@
continue
b = self.bin2src[b]
self.pkgdeps[b] = 'MYdvd{}'.format(self.api.rings.index(project))
- break
def check_buildconfig(self, project):
url = makeurl(self.api.apiurl, ['build', project, 'standard', '_buildconfig'])
@@ -148,48 +141,123 @@
b = self.bin2src[prein]
self.pkgdeps[b] = 'MYinstall'
- def check_requiredby(self, project, package):
- # Prioritize x86_64 bit.
- for arch in reversed(self.api.cstaging_archs):
- for fileinfo in fileinfo_ext_all(self.api.apiurl, project, 'standard', arch, package):
- for requiredby in fileinfo.findall('provides_ext/requiredby[@name]'):
- b = self.bin2src[requiredby.get('name')]
- if b == package:
- # A subpackage depending on self.
- continue
- self.pkgdeps[package] = b
- return True
- return False
+ @memoize(session=True)
+ def package_get_requiredby(self, project, package, repo, arch):
+ "For a given package, return which source packages it provides runtime
deps for."
+ ret = set()
+ for fileinfo in fileinfo_ext_all(self.api.apiurl, project, repo, arch,
package):
+ for requiredby in
fileinfo.findall('provides_ext/requiredby[@name]'):
+ ret.add(self.bin2src[requiredby.get('name')])
+
+ return ret
def check_depinfo_ring(self, prj, nextprj):
if not self.repo_state_acceptable(prj):
return False
+ # Dict of linking package -> linked package
+ self.links = {}
self.find_inner_ring_links(prj)
- for arch in self.api.cstaging_archs:
- self.fill_pkgdeps(prj, 'standard', arch)
- if self.api.rings.index(prj) == 0:
- self.check_buildconfig(prj)
- else:
- for arch in self.api.cstaging_archs:
+ # Only loop through sources once from their origin ring to ensure single
+ # step moving to allow check_requiredby() to see result in each ring.
+ self.sources = set()
+ all_needed_sources = set()
+
+ # For each arch, collect needed source packages.
+ # Prioritize x86_64.
+ for arch in reversed(self.api.cstaging_archs):
+ print(f"Arch {arch}")
+
+ # Dict of needed source pkg -> reason why it's needed
+ self.pkgdeps = {}
+ # Note: bin2src is not cleared, that way ring1 pkgs can depend
+ # on binaries from ring0.
+ self.fill_pkginfo(prj, 'standard', arch)
+
+ # 1. No images built, just for bootstrapping the rpm buildenv.
+ # 2. Treat multibuild flavors as independent packages
+ is_ring0 = self.api.rings.index(prj) == 0
+
+ # Collect directly needed packages:
+ # For ring 0, prjconf (Preinstall). For ring 1, images.
+ if is_ring0:
+ self.check_buildconfig(prj)
+ else:
self.check_image_bdeps(prj, arch)
- for source in self.sources:
- if (source not in self.pkgdeps and
- source not in self.links and
- source not in self.whitelist):
- if source.startswith('texlive-specs-'): # XXX: texlive bullshit packaging
+ # Keep all preinstallimages
+ for pkg in self.sources:
+ if pkg.startswith("preinstallimage"):
+ self.pkgdeps[pkg] = "preinstallimage"
+
+ # Treat all binaries in the whitelist as needed
+ for pkg in self.whitelist:
+ if pkg in self.sources:
+ self.pkgdeps[pkg] = "whitelist"
+
+ to_visit = set(self.pkgdeps)
+ # print("Directly needed: ", to_visit)
+
+ url = makeurl(self.api.apiurl, ['build', prj, 'standard', arch, '_builddepinfo'], {"view": "pkgnames"})
+ root = ET.parse(http_GET(url)).getroot()
+
+ while len(to_visit) > 0:
+ new_deps = {}
+ for pkg in to_visit:
+ if not is_ring0:
+ # Outside of ring0, if one multibuild flavor is needed, add all of them
+ mainpkg = pkg.split(":")[0]
+ for src in self.sources:
+ if src.startswith(f"{mainpkg}:"):
+ new_deps[src] = pkg
+
+ # Same for link groups
+ for ldst, lsrc in self.links.items():
+ if lsrc == mainpkg:
+ new_deps[ldst] = pkg
+ elif ldst == mainpkg:
+ new_deps[lsrc] = pkg
+
+ # Add all packages which this package depends on
+ for dep in root.xpath(f"package[@name='{pkg}']/pkgdep"):
+ new_deps[dep.text] = pkg
+
+ # Filter out already visited deps
+ to_visit = set(new_deps).difference(set(self.pkgdeps))
+ for pkg, reason in new_deps.items():
+ self.pkgdeps[pkg] = reason
+
+ all_needed_sources |= set(self.pkgdeps)
+
+ # _builddepinfo only takes care of build deps. runtime deps are handled by
+ # fileinfo_ext_all, but that's really expensive. Thus the "obvious" algorithm
+ # of walking from needed packages to their deps would be too slow. Instead,
+ # walk from possibly unneeded packages (much fewer than needed) and check whether
+ # they satisfy runtime deps of needed packages.
+ # Do this after each batch of buildtime deps were resolved to minimize lookups.
+ if len(to_visit) != 0:
continue
- # Expensive check so left until last.
- if self.check_requiredby(prj, source):
- continue
-
- print('# - {}'.format(source))
- self.commands.append('osc rdelete -m cleanup {} {}'.format(prj, source))
- if nextprj:
- self.commands.append('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))
- # Only loop through sources once from their origin ring to ensure single
- # step moving to allow check_requiredby() to see result in each ring.
- self.sources = set()
+ # Technically this should be self.pkgdeps, but on i586 pretty much nothing
+ # is needed (no built images) so we continue where x86_64 left off
+ maybe_unneeded = self.sources.difference(all_needed_sources)
+ for pkg in sorted(maybe_unneeded):
+ requiredby = self.package_get_requiredby(prj, pkg, 'standard', arch)
+ requiredby = requiredby.intersection(all_needed_sources)
+ # Required by needed packages?
+ if len(requiredby):
+ print(f"# {pkg} needed by {requiredby}")
+ # Include it and also resolve its build deps
+ self.pkgdeps[pkg] = requiredby
+ to_visit.add(pkg)
+
+ self.commands.append(f"# For {prj}:")
+ for source in sorted(self.sources):
+ if source not in all_needed_sources:
+ if ":" in source:
+ self.commands.append(f"# Multibuild flavor {source} not
needed")
+ else:
+ self.commands.append('osc rdelete -m cleanup {} {}'.format(prj, source))
+ if nextprj:
+ self.commands.append('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))
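
The rewritten check_depinfo_ring() above seeds the per-arch set of needed sources (image build deps or Preinstall packages, preinstallimages, the whitelist), expands it to a fixed point over the cheap _builddepinfo view=pkgnames data while pulling in multibuild flavors and link partners, and only consults the expensive requiredby information once that walk stalls, restricted to the packages that still look unneeded. A condensed sketch of that loop, with the OBS calls replaced by placeholder callables (build_deps, flavors_and_links_of and runtime_reverse_deps are stand-ins for this example, not functions from the diff):

# Condensed sketch of the fixed-point walk in check_depinfo_ring(); the three
# callables are assumptions standing in for _builddepinfo (view=pkgnames),
# the multibuild flavor/link expansion and package_get_requiredby().
def resolve_needed(sources, seeds, build_deps, flavors_and_links_of, runtime_reverse_deps):
    needed = dict(seeds)          # source package -> reason it must stay
    to_visit = set(needed)
    while to_visit:
        new_deps = {}
        for pkg in to_visit:
            for sibling in flavors_and_links_of(pkg):   # keep flavor/link groups together
                new_deps.setdefault(sibling, pkg)
            for dep in build_deps(pkg):                 # cheap build-time deps
                new_deps.setdefault(dep, pkg)
        to_visit = set(new_deps) - set(needed)
        for pkg, reason in new_deps.items():
            needed.setdefault(pkg, reason)
        if to_visit:
            continue
        # Build-dep walk stalled: do the expensive runtime check, but only for
        # the (much smaller) set of packages that still look unneeded.
        for pkg in sorted(set(sources) - set(needed)):
            required_by = runtime_reverse_deps(pkg) & set(needed)
            if required_by:
                needed[pkg] = required_by
                to_visit.add(pkg)
    return needed

Everything in sources that never makes it into the returned mapping corresponds to the osc rdelete / osc linkpac commands emitted at the end of the hunk.
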
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20230315.982c565/osclib/core.py new/openSUSE-release-tools-20230329.af36a03/osclib/core.py
--- old/openSUSE-release-tools-20230315.982c565/osclib/core.py 2023-03-15 12:37:09.000000000 +0100
+++ new/openSUSE-release-tools-20230329.af36a03/osclib/core.py 2023-03-29 11:02:25.000000000 +0200
@@ -356,6 +356,10 @@
filename = binary.get('filename')
if not filename.endswith('.rpm'):
continue
+ if filename.endswith('.src.rpm'):
+ continue
+ if '-debuginfo-' in filename or '-debugsource-' in filename:
+ continue
yield fileinfo_ext(apiurl, project, repo, arch, package, filename)
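
The osclib/core.py change above makes fileinfo_ext_all() skip source and debug rpms before issuing the per-file fileinfo queries, which keeps the new package_get_requiredby() lookups in cleanup_rings.py cheaper. The filter boils down to a filename predicate; a small self-contained sketch (the helper name and the sample filenames are invented for illustration):

# Illustration only; the real code inlines these checks in fileinfo_ext_all().
def wanted_for_fileinfo(filename: str) -> bool:
    """Binary rpms only: skip non-rpms, source rpms and debug packages."""
    if not filename.endswith('.rpm'):
        return False
    if filename.endswith('.src.rpm'):
        return False
    if '-debuginfo-' in filename or '-debugsource-' in filename:
        return False
    return True

assert wanted_for_fileinfo('vim-9.0.1392-1.1.x86_64.rpm')
assert not wanted_for_fileinfo('vim-9.0.1392-1.1.src.rpm')
assert not wanted_for_fileinfo('vim-debuginfo-9.0.1392-1.1.x86_64.rpm')
assert not wanted_for_fileinfo('vim-debugsource-9.0.1392-1.1.x86_64.rpm')
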
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20230315.982c565/staging-installcheck.py new/openSUSE-release-tools-20230329.af36a03/staging-installcheck.py
--- old/openSUSE-release-tools-20230315.982c565/staging-installcheck.py 2023-03-15 12:37:09.000000000 +0100
+++ new/openSUSE-release-tools-20230329.af36a03/staging-installcheck.py 2023-03-29 11:02:25.000000000 +0200
@@ -21,6 +21,7 @@
from osclib.repochecks import installcheck, mirror
from osclib.stagingapi import StagingAPI
+from osclib.memoize import memoize
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
CheckResult = namedtuple('CheckResult', ('success', 'comment'))
@@ -69,20 +70,33 @@
comments.append('Error: missing alternative provides for {}'.format(provide))
return False
+ @memoize(session=True)
+ def pkg_with_multibuild_flavors(self, package):
+ ret = set([package])
+ # Add all multibuild flavors
+ mainprjresult = ET.fromstringlist(osc.core.show_results_meta(self.api.apiurl, self.api.project, multibuild=True))
+ for pkg in mainprjresult.xpath(f"result/status[starts-with(@package,'{package}:')]"):
+ ret.add(pkg.get('package'))
+
+ return ret
+
def check_delete_request(self, req, to_ignore, to_delete, comments):
package = req.get('package')
if package in to_ignore or self.ignore_deletes:
self.logger.info('Delete request for package {} ignored'.format(package))
return True
+ pkg_flavors = self.pkg_with_multibuild_flavors(package)
+
built_binaries = set()
file_infos = []
- for fileinfo in fileinfo_ext_all(self.api.apiurl, self.api.project, self.api.cmain_repo, 'x86_64', package):
- built_binaries.add(fileinfo.find('name').text)
- file_infos.append(fileinfo)
+ for flavor in pkg_flavors:
+ for fileinfo in fileinfo_ext_all(self.api.apiurl, self.api.project, self.api.cmain_repo, 'x86_64', flavor):
+ built_binaries.add(fileinfo.find('name').text)
+ file_infos.append(fileinfo)
# extend the others - this asks for a refactoring, but we don't handle tons of delete requests often
for ptd in to_delete:
- if package == ptd:
+ if ptd in pkg_flavors:
continue
for fileinfo in fileinfo_ext_all(self.api.apiurl, self.api.project, self.api.cmain_repo, 'x86_64', ptd):
built_binaries.add(fileinfo.find('name').text)
@@ -93,7 +107,7 @@
for requiredby in provides.findall('requiredby[@name]'):
result = result and self.check_required_by(fileinfo, provides, requiredby, built_binaries, comments)
- what_depends_on = depends_on(api.apiurl, api.project, api.cmain_repo, [package], True)
+ what_depends_on = depends_on(api.apiurl, api.project, api.cmain_repo, pkg_flavors, True)
# filter out packages to be deleted
for ptd in to_delete:
@@ -174,7 +188,7 @@
to_delete = set()
for req in status.findall('staged_requests/request'):
if req.get('type') == 'delete':
- to_delete.add(req.get('package'))
+ to_delete |= self.pkg_with_multibuild_flavors(req.get('package'))
for req in status.findall('staged_requests/request'):
if req.get('type') == 'delete':
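
The staging-installcheck.py changes above make a delete request for a multibuild package cover all of its flavors: pkg_with_multibuild_flavors() fetches the project results via show_results_meta(..., multibuild=True), collects every status whose package attribute starts with "package:", and is memoized per session so repeated delete requests don't re-fetch the result list; the expanded set then feeds both the built-binaries collection and depends_on(). A hedged sketch of that expansion against a canned _result document (package names and status codes are invented; only the XML shape follows the multibuild result list):

# Sketch fed from a literal _result document instead of osc.core.show_results_meta().
from lxml import etree as ET

RESULT_XML = b"""
<resultlist>
  <result project="openSUSE:Factory" repository="standard" arch="x86_64">
    <status package="somepkg" code="succeeded"/>
    <status package="somepkg:flavor1" code="succeeded"/>
    <status package="somepkg:flavor2" code="excluded"/>
    <status package="otherpkg" code="succeeded"/>
  </result>
</resultlist>
"""

def pkg_with_multibuild_flavors(root, package):
    """Return the package plus every multibuild flavor seen in the result list."""
    ret = {package}
    for status in root.xpath(f"result/status[starts-with(@package,'{package}:')]"):
        ret.add(status.get('package'))
    return ret

root = ET.fromstring(RESULT_XML)
print(sorted(pkg_with_multibuild_flavors(root, 'somepkg')))
# ['somepkg', 'somepkg:flavor1', 'somepkg:flavor2']
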
++++++ openSUSE-release-tools.obsinfo ++++++
--- /var/tmp/diff_new_pack.hufhhc/_old 2023-04-05 21:35:58.262676673 +0200
+++ /var/tmp/diff_new_pack.hufhhc/_new 2023-04-05 21:35:58.266676696 +0200
@@ -1,5 +1,5 @@
name: openSUSE-release-tools
-version: 20230315.982c565
-mtime: 1678880229
-commit: 982c565e51ba25ee52e40369c98ee9b35607d837
+version: 20230329.af36a03
+mtime: 1680080545
+commit: af36a030b3d2e34f24828caf849c463ffeb2db14