Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package product-composer for
openSUSE:Factory checked in at 2025-06-04 20:30:20
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/product-composer (Old)
and /work/SRC/openSUSE:Factory/.product-composer.new.16005 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "product-composer"
Wed Jun 4 20:30:20 2025 rev:38 rq:1282796 version:0.6.2
Changes:
--------
--- /work/SRC/openSUSE:Factory/product-composer/product-composer.changes 2025-06-02 22:01:08.658673730 +0200
+++ /work/SRC/openSUSE:Factory/.product-composer.new.16005/product-composer.changes 2025-06-04 20:30:33.177955076 +0200
@@ -1,0 +2,18 @@
+Wed Jun 4 14:34:39 UTC 2025 - Adrian Schröter <[email protected]>
+
+- update to version 0.6.2
+  * crash fix for agama builds
+  * Rework package set calculation
+
+    The new code allows referencing package sets that are defined
+    later in the recipe. It also allows specifying a flavor or
+    architecture in the reference, so that a flavor can reuse the
+    package set of a different flavor.
+
+-------------------------------------------------------------------
+Wed Jun 4 13:33:43 UTC 2025 - Adrian Schröter <[email protected]>
+
+- update to version 0.6.1:
+  * fixing a crash when no 'iso' tag is defined
+
+-------------------------------------------------------------------
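In practice, the reworked package set calculation (see the rpmutils.py diff further below) means a reference to another package set may now carry optional 'architecture=' and/or 'flavor=' qualifiers, which is how one flavor can reuse the set of a different flavor, and references are resolved lazily so they may point at sets defined later in the recipe. A minimal, standalone sketch of how such references are parsed, using the same regular expression as the new create_package_set_cached(); the reference strings are made-up examples, not from any real recipe:

    import re

    # Group 1 is the set name, groups 2/4 an optional architecture override,
    # group 3 an optional flavor override (same pattern as in rpmutils.py).
    REF = re.compile(r'(\S+)(?:\s+architecture=(\S+))?(?:\s+flavor=(\S*))?(?:\s+architecture=(\S+))?\s*')

    for ref in ('base', 'base architecture=x86_64', 'base flavor=dvd architecture=aarch64'):
        m = REF.fullmatch(ref)
        name, arch, flavor = m[1], m[4] or m[2], m[3]
        print(f"{ref!r} -> set {name!r}, arch override: {arch}, flavor override: {flavor}")

A plain set name parses with both overrides empty, so existing recipes keep their current meaning.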
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ product-composer.spec ++++++
--- /var/tmp/diff_new_pack.pjtE5P/_old 2025-06-04 20:30:33.829982113 +0200
+++ /var/tmp/diff_new_pack.pjtE5P/_new 2025-06-04 20:30:33.829982113 +0200
@@ -23,7 +23,7 @@
 %endif
 Name:           product-composer
-Version:        0.6.0
+Version:        0.6.2
 Release:        0
 Summary:        Product Composer
 License:        GPL-2.0-or-later
++++++ _scmsync.obsinfo ++++++
--- /var/tmp/diff_new_pack.pjtE5P/_old 2025-06-04 20:30:33.869983772 +0200
+++ /var/tmp/diff_new_pack.pjtE5P/_new 2025-06-04 20:30:33.873983937 +0200
@@ -1,5 +1,5 @@
-mtime: 1748856885
-commit: 13367e8fa0c6b7c66bea9a9e5f12134bd1abea745f140e8cd765ad558d9f6b09
+mtime: 1749047731
+commit: 702733326e72f65abbc7171921dbb7ab065c96ece2d55460b548f32bc349fc74
url: https://src.opensuse.org/tools/product-composer
revision: devel
++++++ build.specials.obscpio ++++++
++++++ product-composer.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/commands/verify.py new/product-composer/src/productcomposer/commands/verify.py
--- old/product-composer/src/productcomposer/commands/verify.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/commands/verify.py 2025-06-04 16:35:48.000000000 +0200
@@ -1,6 +1,8 @@
 from ..parsers.yamlparser import parse_yaml
 from . import register
 from ..utils.loggerutils import (die, note)
+from ..utils.rpmutils import (create_package_set)
+from ..core.Pool import Pool
 
 # global db for eulas
 eulas = {}
@@ -12,16 +14,26 @@
 @register("verify")
 class VerifyCommand:
     def run(self, args):
-        yml = parse_yaml(args.filename, args.flavor)
+        result = self.verify(args)
+
+    def verify_flavor(self, filename, flavor):
+        yml = parse_yaml(filename, flavor)
+        if 'architectures' not in yml or not yml['architectures']:
+            if flavor:
+                die(f'No architecture defined for flavor {flavor}')
+            else:
+                die('No architecture defined and no flavor.')
+        # check package sets
+        for arch in yml['architectures']:
+            pool = Pool()
+            for pkgset_name in yml['content']:
+                create_package_set(yml, arch, flavor, pkgset_name, pool=pool)
+            for pkgset_name in yml['unpack']:
+                create_package_set(yml, arch, flavor, pkgset_name, pool=pool)
+        return yml.get('flavors')
+
+    def verify(self, args):
+        flavors = self.verify_flavor(args.filename, args.flavor)
         if args.flavor == None:
-            for flavor in yml['flavors']:
-                yml = parse_yaml(args.filename, flavor)
-                if not yml['architectures']:
-                    die(f'No architecture defined for flavor {flavor}')
-                if yml['content']:
-                    for pkgsetname in yml['content']:
-                        if pkgsetname not in (x['name'] for x in yml['packagesets']):
-                            die(f'package set {pkgsetname} not defined for flavor {flavor}')
-            return
-        if not yml['architectures']:
-            die('No architecture defined and no flavor.')
+            for flavor in flavors:
+                self.verify_flavor(args.filename, flavor)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/core/config.py new/product-composer/src/productcomposer/core/config.py
--- old/product-composer/src/productcomposer/core/config.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/core/config.py 1970-01-01 01:00:00.000000000 +0100
@@ -1,107 +0,0 @@
-""" Global application configuration.
-
-This module defines a global configuration object. Other modules should use
-this object to store application-wide configuration values.
-
-"""
-from pathlib import Path
-from string import Template
-import re
-try:
- import tomllib # Python 3.11+
-except ModuleNotFoundError:
- import tomli as tomllib
-
-from .logger import logger
-
-
-__all__ = "config", "TomlConfig"
-
-
-class _AttrDict(dict):
- """ A dict-like object with attribute access.
-
- """
- def __getitem__(self, key: str):
- """ Access dict values by key.
-
- :param key: key to retrieve
- """
- value = super(_AttrDict, self).__getitem__(key)
- if isinstance(value, dict):
- # For mixed recursive assignment (e.g. `a["b"].c = value` to work
- # as expected, all dict-like values must themselves be _AttrDicts.
- # The "right way" to do this would be to convert to an _AttrDict on
- # assignment, but that requires overriding both __setitem__
- # (straightforward) and __init__ (good luck). An explicit type
- # check is used here instead of EAFP because exceptions would be
- # frequent for hierarchical data with lots of nested dicts.
- self[key] = value = _AttrDict(value)
- return value
-
- def __getattr__(self, key: str) -> object:
- """ Get dict values as attributes.
-
- :param key: key to retrieve
- """
- return self[key]
-
- def __setattr__(self, key: str, value: object):
- """ Set dict values as attributes.
-
- :param key: key to set
- :param value: new value for key
- """
- self[key] = value
-
-
-class TomlConfig(_AttrDict):
- """ Store data from TOML configuration files.
-
- """
- def __init__(self, paths=None, root=None, params=None):
- """ Initialize this object.
-
- :param paths: one or more config file paths to load
- :param root: place config values at this root
- :param params: mapping of parameter substitutions
- """
- super().__init__()
- if paths:
- self.load(paths, root, params)
-
- def load(self, paths, root=None, params=None):
- """ Load data from configuration files.
-
- Configuration values are read from a sequence of one or more TOML
- files. Files are read in the given order, and a duplicate value will
- overwrite the existing value. If a root is specified the config data
- will be loaded under that attribute.
-
- :param paths: one or more config file paths to load
- :param root: place config values at this root
- :param params: mapping of parameter substitutions
- """
- try:
- paths = [Path(paths)]
- except TypeError:
- # Assume this is a sequence of paths.
- pass
- if params is None:
- params = {}
- for path in paths:
- # Comments must be stripped prior to template substitution to avoid
- # any unintended semantics such as stray `$` symbols.
- comment = re.compile(r"\s*#.*$", re.MULTILINE)
- with open(path, "rt") as stream:
- logger.info(f"Reading config data from '{path}'")
- conf = comment.sub("", stream.read())
- toml = Template(conf).substitute(params)
- data = tomllib.loads(toml)
- if root:
- self.setdefault(root, {}).update(data)
- else:
- self.update(data)
-
-
-config = TomlConfig()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/createartifacts/createagamaiso.py new/product-composer/src/productcomposer/createartifacts/createagamaiso.py
--- old/product-composer/src/productcomposer/createartifacts/createagamaiso.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/createartifacts/createagamaiso.py 2025-06-04 16:35:48.000000000 +0200
@@ -6,7 +6,7 @@
 from ..utils.cryptoutils import create_sha256_for
 from ..config import (verbose_level, ISO_PREPARER)
 
-def create_agama_iso(outdir, isoconf, pool, workdir, application_id, arch):
+def create_agama_iso(outdir, isoconf, build_options, pool, workdir, application_id, arch):
     verbose = True if verbose_level > 0 else False
     base = isoconf['base']
     if verbose:
@@ -30,7 +30,7 @@
     # create new iso
     tempdir = f"{outdir}/mksusecd"
     os.mkdir(tempdir)
-    if 'base_skip_packages' not in yml['build_options']:
+    if 'base_skip_packages' not in build_options:
         args = ['cp', '-al', workdir, f"{tempdir}/install"]
         run_helper(args, failmsg="add tree to agama image")
     args = ['mksusecd', agamaiso, tempdir, '--create', workdir + '.install.iso']
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/createartifacts/createtree.py new/product-composer/src/productcomposer/createartifacts/createtree.py
--- old/product-composer/src/productcomposer/createartifacts/createtree.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/createartifacts/createtree.py 2025-06-04 16:35:48.000000000 +0200
@@ -160,7 +160,7 @@
         create_updateinfo_xml(maindir, yml, pool, flavor, debugdir, sourcedir)
 
     # Add License File and create extra .license directory
-    if yml['iso'].get('tree', None) != 'drop':
+    if yml['iso'] and yml['iso'].get('tree', None) != 'drop':
         licensefilename = '/license.tar'
         if os.path.exists(maindir + '/license-' + yml['name'] + '.tar') or os.path.exists(maindir + '/license-' + yml['name'] + '.tar.gz'):
             licensefilename = '/license-' + yml['name'] + '.tar'
@@ -210,16 +210,17 @@
         # Other medias/workdirs would then be generated as usual, as
         # presumably you wouldn't need a bootable iso for source and
         # debuginfo packages.
-        if workdir == maindir and yml['iso']['base']:
-            agama_arch = yml['architectures'][0]
-            note(f"Export main tree into agama iso file for {agama_arch}")
-            create_agama_iso(outdir, yml['iso'], pool, workdir, application_id, agama_arch)
-        elif yml['iso']:
-            create_iso(outdir, yml['iso'], workdir, application_id);
-
-        # cleanup
-        if yml['iso']['tree'] == 'drop':
-            shutil.rmtree(workdir)
+        if yml['iso']:
+            if workdir == maindir and yml['iso']['base']:
+                agama_arch = yml['architectures'][0]
+                note(f"Export main tree into agama iso file for {agama_arch}")
+                create_agama_iso(outdir, yml['iso'], yml['build_options'], pool, workdir, application_id, agama_arch)
+            else:
+                create_iso(outdir, yml['iso'], workdir, application_id);
+
+            # cleanup
+            if yml['iso']['tree'] == 'drop':
+                shutil.rmtree(workdir)
 
     # create SBOM data
     generate_sbom_call = None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/dispatcher.py new/product-composer/src/productcomposer/dispatcher.py
--- old/product-composer/src/productcomposer/dispatcher.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/dispatcher.py 2025-06-04 16:35:48.000000000 +0200
@@ -1,10 +1,8 @@
 from .commands import COMMANDS
 
 def dispatch(args):
-    print(COMMANDS)
     cmd_class = COMMANDS.get(args.command)
-    print(args.command)
     if not cmd_class:
         raise ValueError(f"Unknown command: {args.command}")
     cmd_instance = cmd_class()
-    cmd_instance.run(args)
\ No newline at end of file
+    cmd_instance.run(args)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/src/productcomposer/utils/rpmutils.py new/product-composer/src/productcomposer/utils/rpmutils.py
--- old/product-composer/src/productcomposer/utils/rpmutils.py 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/src/productcomposer/utils/rpmutils.py 2025-06-04 16:35:48.000000000 +0200
@@ -1,4 +1,5 @@
 import os
+import re
 import shutil
 
 from ..core.PkgSet import PkgSet
@@ -12,55 +13,89 @@
         die('need a package pool to create the __all__ package set')
     pkgset = PkgSet(setname)
     pkgset.add_specs([n for n in pool.names(arch) if not (n.endswith('-debuginfo') or n.endswith('-debugsource'))])
-
     return pkgset
 
-
-def create_package_set(yml, arch, flavor, setname, pool=None):
-    pkgsets = {}
+def filter_pkgsets(yml, arch, flavor):
+    pkgsets_raw = {}
     for entry in list(yml['packagesets']):
         name = entry['name'] if 'name' in entry else 'main'
-        if name in pkgsets and pkgsets.get(name, None):
-            die(f'package set {name} is already defined')
-        pkgsets[name] = None
+        if name not in pkgsets_raw:
+            pkgsets_raw[name] = None  # mark as known
         if flavor and entry['flavors'] and flavor not in entry['flavors']:
             continue
         if entry['architectures'] and arch not in entry['architectures']:
             continue
-        pkgset = PkgSet(name)
-        pkgsets[name] = pkgset
-        if entry['supportstatus']:
-            pkgset.supportstatus = entry['supportstatus']
-            if pkgset.supportstatus.startswith('='):
-                pkgset.override_supportstatus = True
-                pkgset.supportstatus = pkgset.supportstatus[1:]
-        if entry['packages']:
-            pkgset.add_specs(entry['packages'])
-        for setop in 'add', 'sub', 'intersect':
-            if entry.get(setop, None) is None:
-                continue
-            for oname in entry[setop]:
-                if oname == '__all__' and oname not in pkgsets:
-                    pkgsets[oname] = create_package_set_all(oname, pool, arch)
-                if oname == name or oname not in pkgsets:
-                    die(f'package set {oname} does not exist')
-                if pkgsets[oname] is None:
-                    pkgsets[oname] = PkgSet(oname)  # instantiate
-                match setop:
-                    case 'add':
-                        pkgset.add(pkgsets[oname])
-                    case 'sub':
-                        pkgset.sub(pkgsets[oname])
-                    case 'intersect':
-                        pkgset.intersect(pkgsets[oname])
-                    case _:
-                        die(f"unsupported package set operation '{setop}'")
-
-    if setname not in pkgsets:
-        die(f'package set {setname} is not defined')
-    if pkgsets[setname] is None:
-        pkgsets[setname] = PkgSet(setname)  # instantiate
-    return pkgsets[setname]
+        if pkgsets_raw.get(name):
+            die(f'package set {name} is already defined')
+        pkgsets_raw[name] = entry
+    return pkgsets_raw
+
+def create_package_set_cached(yml, arch, flavor, setname, pkgsetcache, pkgsets_rawcache, pool=None):
+    if flavor is None:
+        flavor = ''
+
+    # process arch/flavor overwrites
+    m = re.fullmatch(r'(\S+)(?:\s+architecture=(\S+))?(?:\s+flavor=(\S*))?(?:\s+architecture=(\S+))?\s*', setname)
+    if m:
+        setname = m[1]
+        arch = m[4] or m[2] or arch
+        flavor = m[3] or flavor
+
+    if setname == '__all__':
+        setkey = f"{setname}/{arch}"
+        if not pkgsetcache.get(setkey):
+            pkgsetcache[setkey] = create_package_set_all(setname, pool, arch)
+        return pkgsetcache[setkey]
+
+    setkey = f"{setname}/{arch}/{flavor}"
+    if setkey in pkgsetcache:
+        if not pkgsetcache[setkey]:
+            die(f"cyclic definition of package set '{setname}'")
+        return pkgsetcache[setkey]
+    pkgsetcache[setkey] = None  # mark as in progress for cycle detection
+
+    rawcachekey = f"{arch}/{flavor}"
+    pkgsets_raw = pkgsets_rawcache.get(rawcachekey)
+    if not pkgsets_raw:
+        pkgsets_raw = filter_pkgsets(yml, arch, flavor)
+        pkgsets_rawcache[rawcachekey] = pkgsets_raw
+
+    if setname not in pkgsets_raw:
+        die(f'package set {setname} does not exist')
+
+    pkgset = PkgSet(setname)
+
+    entry = pkgsets_raw.get(setname)
+    if not entry:
+        pkgsetcache[setkey] = pkgset
+        return pkgset  # return empty package set if there is no matching flavor/arch
+
+    if entry['supportstatus']:
+        pkgset.supportstatus = entry['supportstatus']
+        if pkgset.supportstatus.startswith('='):
+            pkgset.override_supportstatus = True
+            pkgset.supportstatus = pkgset.supportstatus[1:]
+    if entry['packages']:
+        pkgset.add_specs(entry['packages'])
+    for setop in 'add', 'sub', 'intersect':
+        if entry.get(setop) is None:
+            continue
+        for oname in entry[setop]:
+            opkgset = create_package_set_cached(yml, arch, flavor, oname, pkgsetcache, pkgsets_rawcache, pool=pool)
+            match setop:
+                case 'add':
+                    pkgset.add(opkgset)
+                case 'sub':
+                    pkgset.sub(opkgset)
+                case 'intersect':
+                    pkgset.intersect(opkgset)
+                case _:
+                    die(f"unsupported package set operation '{setop}'")
+    pkgsetcache[setkey] = pkgset
+    return pkgset
+
+def create_package_set(yml, arch, flavor, setname, pool=None):
+    return create_package_set_cached(yml, arch, flavor, setname, {}, {}, pool=pool)
 
 
 def link_file_into_dir(source, directory, name=None):
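The heart of the rework above is the per-(set name, architecture, flavor) cache: a cache slot is set to None before a set's own references are resolved, so a set that directly or indirectly references itself is reported as a cyclic definition instead of recursing forever, and sets defined later in the recipe are simply resolved on first use. A minimal, self-contained sketch of that memoization-with-sentinel pattern, using simplified hypothetical data (plain name sets and only an 'add' operation instead of the real PkgSet/Pool machinery):

    def resolve(name, definitions, cache):
        # Return the resolved member set for 'name', reusing cached results.
        if name in cache:
            if cache[name] is None:
                raise ValueError(f"cyclic definition of package set '{name}'")
            return cache[name]
        cache[name] = None  # mark as "in progress" before recursing
        entry = definitions[name]
        members = set(entry.get('packages', []))
        for other in entry.get('add', []):
            members |= resolve(other, definitions, cache)
        cache[name] = members  # the finished result replaces the marker
        return members

    # Hypothetical package set definitions, not taken from a real recipe.
    defs = {
        'base': {'packages': ['glibc', 'bash']},
        'everything': {'packages': ['vim'], 'add': ['base']},
    }
    print(sorted(resolve('everything', defs, {})))  # ['bash', 'glibc', 'vim']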
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/product-composer/tests/unit/core/test_config.py.disabled new/product-composer/tests/unit/core/test_config.py.disabled
--- old/product-composer/tests/unit/core/test_config.py.disabled 2025-06-02 11:35:23.000000000 +0200
+++ new/product-composer/tests/unit/core/test_config.py.disabled 1970-01-01 01:00:00.000000000 +0100
@@ -1,82 +0,0 @@
-""" Test suite for the core.config module.
-
-"""
-from pathlib import Path
-
-import pytest
-from {{ cookiecutter.app_name }}.core.config import * # tests __all__
-
-
-class TomlConfigTest(object):
- """ Test suite for the YamlConfig class.
-
- """
- @classmethod
- @pytest.fixture
- def files(cls, tmp_path):
- """ Return configuration files for testing.
-
- """
- files = "conf1.toml", "conf2.toml"
- return tuple(Path("tests", "assets", item) for item in files)
-
- @classmethod
- @pytest.fixture
- def params(cls):
- """ Define configuration parameters.
-
- """
- return {"var1": "VAR1", "var2": "VAR2", "var3": "VAR3"}
-
- def test_item(self):
- """ Test item access.
-
- """
- config = TomlConfig()
- config["root"] = {}
- config["root"]["key"] = "value"
- assert config["root"]["key"] == "value"
- return
-
- def test_attr(self):
- """ Test attribute access.
-
- """
- config = TomlConfig()
- config.root = {}
- config.root.key = "value"
- assert config.root.key == "value"
- return
-
- @pytest.mark.parametrize("root", (None, "root"))
- def test_init(self, files, params, root):
- """ Test the __init__() method for loading a file.
-
- """
- merged = {"str": "$str", "var": "VAR1VAR3"}
- config = TomlConfig(files, root, params)
- if root:
- assert config == {root: merged}
- else:
- assert config == merged
- return
-
- @pytest.mark.parametrize("root", (None, "root"))
- def test_load(self, files, params, root):
- """ Test the load() method.
-
- """
- merged = {"str": "$str", "var": "VAR1VAR3"}
- config = TomlConfig()
- config.load(files, root, params)
- if root:
- assert config == {root: merged}
- else:
- assert config == merged
- return
-
-
-# Make the module executable.
-
-if __name__ == "__main__":
- raise SystemExit(pytest.main([__file__]))