Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package osc for openSUSE:Factory checked in 
at 2026-02-23 16:13:25
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/osc (Old)
 and      /work/SRC/openSUSE:Factory/.osc.new.1977 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "osc"

Mon Feb 23 16:13:25 2026 rev:222 rq:1334492 version:1.24.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/osc/osc.changes  2025-12-18 18:37:10.804050401 
+0100
+++ /work/SRC/openSUSE:Factory/.osc.new.1977/osc.changes        2026-02-23 
16:15:24.332399451 +0100
@@ -1,0 +2,20 @@
+Mon Feb 23 12:37:11 UTC 2026 - Daniel Mach <[email protected]>
+
+- 1.24.0
+  - Command-line:
+    - Add '--target-owner' option to 'git-obs repo fork' command
+    - Add '--self' parameter to fix 'no matching parent repo' error message in 
'git-obs pr create'
+    - Fix 'osc aggregatepac' for scmsync packages
+    - Fix 'osc build' to retrieve buildconfig from git package's cache
+    - Fix 'osc token' error handling for project wide trigger
+    - Fix string formatting for id in obs-request.xml in 'git-obs pr dump'
+  - Library:
+    - Consolidate build types in build.py and commandline.py
+    - Fix build.get_build_type() by comparing binary_type only if specified
+    - Make use of queryconfig tool configurable and consistent
+    - Fix how get_request_collection() filters the projects and packages
+    - Support copying packages from an scmsync source, when target exists
+    - Add timestamps to the DEBUG output
+    - Update new project template
+
+-------------------------------------------------------------------

Old:
----
  osc-1.23.0.tar.gz

New:
----
  osc-1.24.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ osc.spec ++++++
--- /var/tmp/diff_new_pack.Dm68z1/_old  2026-02-23 16:15:25.180434417 +0100
+++ /var/tmp/diff_new_pack.Dm68z1/_new  2026-02-23 16:15:25.184434581 +0100
@@ -80,7 +80,7 @@
 %endif
 
 Name:           osc
-Version:        1.23.0
+Version:        1.24.0
 Release:        0
 Summary:        Command-line client for the Open Build Service
 License:        GPL-2.0-or-later

++++++ PKGBUILD ++++++
--- /var/tmp/diff_new_pack.Dm68z1/_old  2026-02-23 16:15:25.240436890 +0100
+++ /var/tmp/diff_new_pack.Dm68z1/_new  2026-02-23 16:15:25.244437055 +0100
@@ -1,5 +1,5 @@
 pkgname=osc
-pkgver=1.23.0
+pkgver=1.24.0
 pkgrel=0
 pkgdesc="Command-line client for the Open Build Service"
 arch=('x86_64')

++++++ debian.changelog ++++++
--- /var/tmp/diff_new_pack.Dm68z1/_old  2026-02-23 16:15:25.300439364 +0100
+++ /var/tmp/diff_new_pack.Dm68z1/_new  2026-02-23 16:15:25.308439694 +0100
@@ -1,4 +1,4 @@
-osc (1.23.0-0) unstable; urgency=low
+osc (1.24.0-0) unstable; urgency=low
 
   * Placeholder
 

++++++ osc-1.23.0.tar.gz -> osc-1.24.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/NEWS new/osc-1.24.0/NEWS
--- old/osc-1.23.0/NEWS 2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/NEWS 2026-02-23 13:35:22.000000000 +0100
@@ -1,3 +1,20 @@
+- 1.24.0
+  - Command-line:
+    - Add '--target-owner' option to 'git-obs repo fork' command
+    - Add '--self' parameter to fix 'no matching parent repo' error message in 
'git-obs pr create'
+    - Fix 'osc aggregatepac' for scmsync packages
+    - Fix 'osc build' to retrieve buildconfig from git package's cache
+    - Fix 'osc token' error handling for project wide trigger
+    - Fix string formatting for id in obs-request.xml in 'git-obs pr dump'
+  - Library:
+    - Consolidate build types in build.py and commandline.py
+    - Fix build.get_build_type() by comparing binary_type only if specified
+    - Make use of queryconfig tool configurable and consistent
+    - Fix how get_request_collection() filters the projects and packages
+    - Support copying packages from an scmsync source, when target exists
+    - Add timestamps to the DEBUG output
+    - Update new project template
+
 - 1.23.0
   - Command-line:
     - Add '--target-owner' option to 'git-obs pr create' to specify the target 
owner explicitly
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/README.md new/osc-1.24.0/README.md
--- old/osc-1.23.0/README.md    2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/README.md    2026-02-23 13:35:22.000000000 +0100
@@ -13,6 +13,21 @@
 
 ## Installation
 
+
+Use osc directly from git if you're developing or testing it:
+
+    # clone the sources
+    git clone https://github.com/openSUSE/osc.git
+    cd osc
+
+    # optional: checkout a pull request
+    PR=<num>; git fetch origin pull/$PR/head:pr/$PR && git switch pr/$PR
+
+    # run osc or git-obs:
+    ./osc-wrapper.py ...
+    ./git-obs.py ...
+
+
 RPM packages are available in the 
[openSUSE:Tools](http://download.opensuse.org/repositories/openSUSE:/Tools/) 
repository.
 
     zypper addrepo --repo 
http://download.opensuse.org/repositories/openSUSE:/Tools/openSUSE_Tumbleweed/openSUSE:Tools.repo
@@ -23,14 +38,6 @@
     zypper addrepo --repo 
http://download.opensuse.org/repositories/OBS:/Server:/Unstable/openSUSE_Factory/OBS:Server:Unstable.repo
     zypper install osc
 
-To install from git, do
-
-    ./setup.py build
-    ./setup.py install
-
-Alternatively, you can directly use `./osc-wrapper.py` from the source 
directory,
-which is easier if you develop on osc.
-
 
 ## Configuration
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/__init__.py 
new/osc-1.24.0/osc/__init__.py
--- old/osc-1.23.0/osc/__init__.py      2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/__init__.py      2026-02-23 13:35:22.000000000 +0100
@@ -13,7 +13,7 @@
 
 
 from .util import git_version
-__version__ = git_version.get_version('1.23.0')
+__version__ = git_version.get_version('1.24.0')
 
 
 # vim: sw=4 et
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/build.py new/osc-1.24.0/osc/build.py
--- old/osc-1.23.0/osc/build.py 2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/build.py 2026-02-23 13:35:22.000000000 +0100
@@ -30,6 +30,7 @@
 from .util import archquery, debquery, packagequery, rpmquery
 from .util import repodata
 from .util.helper import decode_it
+from .util.models import *
 from .util.xml import xml_parse
 
 
@@ -74,6 +75,208 @@
     hostarch = 'hppa'
 
 
+class BuildType(BaseModel):
+    name: str = Field()
+    binary_type: Optional[str] = Field()
+    recipes: List[str] = Field()
+    package_suffix: Optional[str] = Field()
+    binary_packages_paths: List[str] = Field()
+    source_packages_paths: List[str] = Field()
+    prefer_packages_paths: List[str] = Field(default=[])
+    prefer_packages_exclude_paths: List[str] = Field(default=[])
+
+    def _get_files(self, topdir: str, patterns: List[str]) -> List[str]:
+        result = []
+        for pattern in patterns:
+            path = os.path.join(topdir, pattern.lstrip("/"))
+            for i in glob.glob(path, recursive=True):
+                if not os.path.isfile(i):
+                    continue
+                result.append(i)
+        return result
+
+    def get_binaries(self, topdir: str) -> List[str]:
+        return self._get_files(topdir, self.binary_packages_paths)
+
+    def get_sources(self, topdir: str) -> List[str]:
+        return self._get_files(topdir, self.source_packages_paths)
+
+    def get_recipes(self, topdir: str) -> List[str]:
+        result = []
+        for fn in os.listdir(topdir):
+            for pattern in self.recipes:
+                if fnmatch.fnmatch(fn, pattern):
+                    result.append(fn)
+        result.sort()
+        return result
+
+
+BUILD_TYPES: List[BuildType] = [
+    BuildType(
+        name="appimage",
+        recipes=["appimage.yml"],
+        binary_packages_paths=["OTHER/**/*.AppImage"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="arch",
+        recipes=["PKGBUILD"],
+        package_suffix="arch",
+        binary_packages_paths=["ARCHPKGS/**/*.pkg.tar*"],
+        source_packages_paths=[],
+        prefer_packages_paths=["**/*.pkg.tar*"],
+    ),
+    BuildType(
+        name="collax",
+        recipes=["build.collax"],
+        package_suffix="deb",
+        binary_packages_paths=["DEBS/**/*.deb"],
+        source_packages_paths=["SOURCES.DEB/**"],
+        prefer_packages_paths=["**/*.deb"],
+    ),
+    BuildType(
+        # IMPORTANT: keep in sync with "podman"
+        name="docker",
+        recipes=["Containerfile", "Dockerfile", "Containerfile.*", 
"Dockerfile.*"],
+        binary_packages_paths=["DOCKER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="dsc",
+        recipes=["*.dsc"],
+        package_suffix="deb",
+        binary_packages_paths=["DEBS/**/*.deb"],
+        source_packages_paths=["SOURCES.DEB/**"],
+        prefer_packages_paths=["**/*.deb"],
+    ),
+    BuildType(
+        name="fissile",
+        recipes=["fissile.yml"],
+        binary_packages_paths=["FISSILE/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="flatpak",
+        recipes=["*flatpak.yaml", "*flatpak.yml", "*flatpak.json"],
+        binary_packages_paths=["OTHER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="helm",
+        recipes=["Chart.yaml"],
+        binary_packages_paths=["HELM/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="kiwi",
+        recipes=["*.kiwi"],
+        binary_packages_paths=["KIWI/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="livebuild",
+        recipes=["*.livebuild"],
+        package_suffix="deb",
+        binary_packages_paths=["OTHER/**/*.iso"],
+        source_packages_paths=[],
+        prefer_packages_paths=["**/*.deb"],
+    ),
+    BuildType(
+        name="mkosi",
+        recipes=["mkosi.*"],
+        binary_packages_paths=["OTHER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        # IMPORTANT: keep in sync with "docker"
+        name="podman",
+        recipes=["Containerfile", "Dockerfile", "Containerfile.*", 
"Dockerfile.*"],
+        binary_packages_paths=["DOCKER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="preinstallimage",
+        recipes=["_preinstallimage"],
+        binary_packages_paths=["OTHER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="productcompose",
+        recipes=["*.productcompose"],
+        binary_packages_paths=["PRODUCT/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="simpleimage",
+        recipes=["simpleimage"],
+        binary_packages_paths=["OTHER/**"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="snapcraft",
+        recipes=["snapcraft.yaml"],
+        package_suffix="deb",
+        binary_packages_paths=["OTHER/**/*.snap"],
+        source_packages_paths=[],
+    ),
+    BuildType(
+        name="spec",
+        binary_type="rpm",
+        recipes=["*.spec"],
+        package_suffix="rpm",
+        binary_packages_paths=["RPMS/**/*.rpm"],
+        source_packages_paths=["SRPMS/**/*.src.rpm"],
+        prefer_packages_paths=["**/*.rpm"],
+        prefer_packages_exclude_paths=["*.src.rpm", "*.nosrc.rpm", 
"*.patch.rpm", "*.delta.rpm"],
+    ),
+    BuildType(
+        name="spec",
+        binary_type="deb",
+        recipes=["*.spec"],
+        package_suffix="deb",
+        binary_packages_paths=["DEBS/**/*.deb"],
+        source_packages_paths=["SDEBS/**/*.sdeb"],
+        prefer_packages_paths=["**/*.deb"],
+    ),
+]
+
+
+def get_build_type(name: str, *, binary_type: Optional[str] = None) -> 
BuildType:
+    for build_type in BUILD_TYPES:
+        # compare the binary type only if it's specified in the build_type
+        if binary_type and build_type.binary_type:
+            if build_type.name == name and build_type.binary_type == 
binary_type:
+                return build_type
+        elif build_type.name == name:
+            return build_type
+    raise oscerr.OscValueError(f"Unknown build type: {name}")
+
+
+def get_build_type_from_recipe_path(recipe_path: str) -> BuildType:
+    recipe_filename = os.path.basename(recipe_path)
+    for build_type in BUILD_TYPES:
+        for pattern in build_type.recipes:
+            if fnmatch.fnmatch(recipe_filename, pattern):
+                return build_type
+    raise oscerr.OscValueError(f"The specified recipe doesn't match any known 
build type: {recipe_filename}")
+
+
+def find_build_recipes(topdir: str, build_type: Optional[str] = None) -> 
List[str]:
+    """
+    Return all file names in ``topdir`` that match known build recipes.
+    """
+    topdir = os.path.abspath(topdir)
+    result = []
+    for fn in os.listdir(topdir):
+        recipe_path = os.path.join(topdir, fn)
+        try:
+            get_build_type_from_recipe_path(recipe_path)
+        except oscerr.OscValueError:
+            continue
+        result.append(fn)
+    return result
+
+
 class Buildinfo:
     """represent the contents of a buildinfo file"""
 
@@ -110,20 +313,11 @@
 
         # are we building .rpm or .deb?
         # XXX: shouldn't we deliver the type via the buildinfo?
-        self.pacsuffix = 'rpm'
-        if self.buildtype in ('dsc', 'collax') or self.binarytype == 'deb':
-            self.pacsuffix = 'deb'
-        if self.buildtype == 'arch':
-            self.pacsuffix = 'arch'
-        if self.buildtype == 'livebuild':
-            self.pacsuffix = 'deb'
-        if self.buildtype == 'docker':
-            # supports rpm and deb
-            self.pacsuffix = binarytype
-        if self.buildtype == 'snapcraft':
-            # atm ubuntu is used as base, but we need to be more clever when
-            # snapcraft also supports rpm
-            self.pacsuffix = 'deb'
+        build_type = get_build_type(self.buildtype, 
binary_type=self.binarytype)
+        self.pacsuffix = build_type.package_suffix
+        if not self.pacsuffix:
+            # the previous osc code did this for 'docker' build type, maybe 
it's ok to use it as a generic fallback
+            self.pacsuffix = self.binarytype
 
         # The architectures become a bit mad ...
         # buildarch: The architecture of the build result      (host arch in 
GNU definition)
@@ -394,106 +588,11 @@
     return (imagefile, imagesource, imageinfo, img_bins)
 
 
-def get_built_files(pacdir, buildtype):
-    if buildtype == 'spec':
-        debs_dir = os.path.join(pacdir, 'DEBS')
-        sdebs_dir = os.path.join(pacdir, 'SDEBS')
-        if os.path.isdir(debs_dir) or os.path.isdir(sdebs_dir):
-            # (S)DEBS directories detected, list their *.(s)deb files
-            b_built = subprocess.Popen(['find', debs_dir, '-name', '*.deb'],
-                                       
stdout=subprocess.PIPE).stdout.read().strip()
-            s_built = subprocess.Popen(['find', sdebs_dir, '-name', '*.sdeb'],
-                                       
stdout=subprocess.PIPE).stdout.read().strip()
-        else:
-            # default: (S)RPMS directories and their *.rpm files
-            b_built = subprocess.Popen(['find', os.path.join(pacdir, 'RPMS'),
-                                        '-name', '*.rpm'],
-                                       
stdout=subprocess.PIPE).stdout.read().strip()
-            s_built = subprocess.Popen(['find', os.path.join(pacdir, 'SRPMS'),
-                                        '-name', '*.rpm'],
-                                       
stdout=subprocess.PIPE).stdout.read().strip()
-    elif buildtype == 'kiwi':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'KIWI'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'docker':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DOCKER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'podman':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DOCKER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'fissile':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'FISSILE'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype in ('dsc', 'collax'):
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'DEBS'),
-                                    '-name', '*.deb'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = subprocess.Popen(['find', os.path.join(pacdir, 
'SOURCES.DEB'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-    elif buildtype == 'arch':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'ARCHPKGS'),
-                                    '-name', '*.pkg.tar*'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'livebuild':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-name', '*.iso*'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'helm':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'HELM'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'snapcraft':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-name', '*.snap'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'appimage':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-name', '*.AppImage'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'simpleimage':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'flatpak':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'preinstallimage':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'productcompose':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'PRODUCT'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    elif buildtype == 'mkosi':
-        b_built = subprocess.Popen(['find', os.path.join(pacdir, 'OTHER'),
-                                    '-type', 'f'],
-                                   
stdout=subprocess.PIPE).stdout.read().strip()
-        s_built = ''
-    else:
-        print('WARNING: Unknown package type \'%s\'.' % buildtype, 
file=sys.stderr)
-        b_built = ''
-        s_built = ''
-    return s_built, b_built
+def get_built_files(pacdir, buildtype, *, binary_type: Optional[str] = None) 
-> Tuple[str, str]:
+    build_type = get_build_type(buildtype, binary_type=binary_type)
+    sources = build_type.get_sources(pacdir)
+    binaries = build_type.get_binaries(pacdir)
+    return "\n".join(sources), "\n".join(binaries)
 
 
 def get_repo(path):
@@ -517,33 +616,35 @@
     paths = []
     repositories = []
 
-    suffix = '*.rpm'
-    if type in ('dsc', 'collax', 'livebuild'):
-        suffix = '*.deb'
-    elif type == 'arch':
-        suffix = '*.pkg.tar.*'
-
-    for dir in dirs:
+    for pkgs_dir in dirs:
         # check for repodata
-        repository = get_repo(dir)
-        if repository is None:
-            paths += glob.glob(os.path.join(os.path.abspath(dir), suffix))
-        else:
+        repository = get_repo(pkgs_dir)
+        if repository:
             repositories.append(repository)
+        else:
+            # we don't know binary type as we haven't received buildconfig yet
+            # that's why we add all files for any matching build types
+            for build_type in BUILD_TYPES:
+                if build_type.name != type:
+                    continue
+                for pattern in build_type.prefer_packages_paths:
+                    for pkg_path in 
glob.glob(os.path.join(os.path.abspath(pkgs_dir), pattern), recursive=True):
+                        use_package = True
+                        for exclude_pattern in 
build_type.prefer_packages_exclude_paths:
+                            if fnmatch.fnmatch(pkg_path, exclude_pattern):
+                                use_package = False
+                                break
+                        if use_package:
+                            paths.append(pkg_path)
 
     packageQueries = packagequery.PackageQueries(wanted_arch)
 
     for repository in repositories:
         repodataPackageQueries = repodata.queries(repository)
-
         for packageQuery in repodataPackageQueries:
             packageQueries.add(packageQuery)
 
     for path in paths:
-        if path.endswith('.src.rpm') or path.endswith('.nosrc.rpm'):
-            continue
-        if path.endswith('.patch.rpm') or path.endswith('.delta.rpm'):
-            continue
         packageQuery = packagequery.PackageQuery.query(path)
         packageQueries.add(packageQuery)
 
@@ -795,45 +896,13 @@
     vm_telnet = None
 
     build_descr = os.path.abspath(build_descr)
-    build_type = os.path.splitext(build_descr)[1][1:]
-    if build_type in ['spec', 'dsc', 'kiwi', 'productcompose', 'livebuild']:
-        # File extension works
-        pass
-    elif os.path.basename(build_descr) == 'PKGBUILD':
-        build_type = 'arch'
-    elif os.path.basename(build_descr) == 'build.collax':
-        build_type = 'collax'
-    elif os.path.basename(build_descr) == 'appimage.yml':
-        build_type = 'appimage'
-    elif os.path.basename(build_descr) == 'Chart.yaml':
-        build_type = 'helm'
-    elif os.path.basename(build_descr) == 'snapcraft.yaml':
-        build_type = 'snapcraft'
-    elif os.path.basename(build_descr) == 'simpleimage':
-        build_type = 'simpleimage'
-    elif os.path.basename(build_descr) == 'Containerfile' or 
os.path.basename(build_descr).startswith('Containerfile.'):
-        build_type = 'docker'
-    elif os.path.basename(build_descr) == 'Dockerfile' or 
os.path.basename(build_descr).startswith('Dockerfile.'):
-        build_type = 'docker'
-    elif os.path.basename(build_descr) == 'fissile.yml':
-        build_type = 'fissile'
-    elif os.path.basename(build_descr) == '_preinstallimage':
-        build_type = 'preinstallimage'
-    elif build_descr.endswith('flatpak.yaml') or 
build_descr.endswith('flatpak.yml') or build_descr.endswith('flatpak.json'):
-        build_type = 'flatpak'
-    elif os.path.basename(build_descr).startswith('mkosi.'):
-        build_type = 'mkosi'
-    else:
-        raise oscerr.WrongArgs(
-            'Unknown build type: \'%s\'. '
-            'Build description should end in .spec, .dsc, .kiwi, 
.productcompose or .livebuild. '
-            'Or being named PKGBUILD, build.collax, simpleimage, appimage.yml, 
'
-            'Chart.yaml, snapcraft.yaml, flatpak.json, flatpak.yml, 
flatpak.yaml, '
-            'preinstallimage, Dockerfile.*, Containerfile.* or mkosi.*' % 
build_type)
 
     if not os.path.isfile(build_descr):
         raise oscerr.WrongArgs('Error: build description file named \'%s\' 
does not exist.' % build_descr)
 
+    build_type_obj = get_build_type_from_recipe_path(build_descr)
+    build_type = build_type_obj.name
+
     buildargs = []
     buildargs.append('--statistics')
     if not opts.userootforbuild:
@@ -1069,8 +1138,8 @@
                 bc_file = open(bc_filename, 'w')
             bc_file.write(decode_it(bc))
             bc_file.flush()
-            if os.path.exists('/usr/lib/build/queryconfig') and not 
opts.nodebugpackages:
-                debug_pkgs = 
decode_it(return_external('/usr/lib/build/queryconfig', '--dist', bc_filename, 
'substitute', 'obs:cli_debug_packages'))
+            if os.path.exists(config.queryconfig_cmd) and not 
opts.nodebugpackages:
+                debug_pkgs = decode_it(return_external(config.queryconfig_cmd, 
'--dist', bc_filename, 'substitute', 'obs:cli_debug_packages'))
                 if len(debug_pkgs) > 0:
                     extra_pkgs.extend(debug_pkgs.strip().split(" "))
 
@@ -1123,8 +1192,8 @@
 
     # Set default binary type if cannot be detected
     binary_type = 'rpm'
-    if os.path.exists('/usr/lib/build/queryconfig'):
-        binary_type = decode_it(return_external('/usr/lib/build/queryconfig', 
'--dist', bc_filename, 'binarytype')).strip()
+    if os.path.exists(config.queryconfig_cmd):
+        binary_type = decode_it(return_external(config.queryconfig_cmd, 
'--dist', bc_filename, 'binarytype')).strip()
     # If binary type is set to a useless value, reset to 'rpm'
     if binary_type == 'UNDEFINED':
         binary_type = 'rpm'
@@ -1325,6 +1394,7 @@
             buildargs.append('--oldpackages=%s' % old_pkg_dir)
 
     # Make packages from buildinfo available as repos for kiwi/docker/fissile
+    # FIXME: add a new attribute to BuildType and decide based on it
     if build_type in ('kiwi', 'docker', 'podman', 'fissile', 'productcompose'):
         if os.path.lexists('repos'):
             shutil.rmtree('repos')
@@ -1648,7 +1718,7 @@
         pacdir = os.path.join(build_root, pacdir)
 
     if os.path.exists(pacdir):
-        (s_built, b_built) = get_built_files(pacdir, bi.buildtype)
+        (s_built, b_built) = get_built_files(pacdir, bi.buildtype, 
binary_type=binary_type)
 
         print()
         if s_built:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/commandline.py 
new/osc-1.24.0/osc/commandline.py
--- old/osc-1.23.0/osc/commandline.py   2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/commandline.py   2026-02-23 13:35:22.000000000 +0100
@@ -9,6 +9,7 @@
 import importlib
 import importlib.util
 import inspect
+import itertools
 import os
 import pkgutil
 import re
@@ -1481,13 +1482,14 @@
 
         apiurl = self.get_api_url()
         user = conf.get_apiurl_usr(apiurl)
+        project = None
+        package = None
 
-        if len(args) > 1:
+        if len(args) == 1:
+            self.argparser.error("project wide trigger is not supported")
+        elif len(args) > 1:
             project = args[0]
             package = args[1]
-        else:
-            project = None
-            package = None
 
         if opts.create:
             if not opts.operation:
@@ -5356,6 +5358,7 @@
         """
 
         from . import _private
+        from .conf import config
         from .core import decode_it
         from .core import get_buildconfig
         from .core import get_repositories_of_project
@@ -5363,13 +5366,13 @@
         from .core import store_read_project
 
         def _repo_type(apiurl, project, repo):
-            if not os.path.exists('/usr/lib/build/queryconfig'):
+            if not os.path.exists(config.queryconfig_cmd):
                 return None
             build_config = get_buildconfig(apiurl, project, repo)
             with tempfile.NamedTemporaryFile() as f:
                 f.write(build_config)
                 f.flush()
-                repo_type = return_external('/usr/lib/build/queryconfig', 
'--dist',
+                repo_type = return_external(config.queryconfig_cmd, '--dist',
                                             f.name, 'repotype').rstrip(b'\n')
             if not repo_type:
                 return None
@@ -7201,6 +7204,7 @@
         """
 
         from . import build as osc_build
+        from .conf import config
         from .core import decode_it
         from .core import get_buildconfig
         from .core import get_buildinfo
@@ -7258,14 +7262,14 @@
 
         extra_pkgs = opts.extra_pkgs.copy() if opts.extra_pkgs else []
 
-        if os.path.exists("/usr/lib/build/queryconfig") and not 
opts.nodebugpackages:
+        if os.path.exists(config.queryconfig_cmd) and not opts.nodebugpackages:
             with NamedTemporaryFile(mode="w+b", prefix="obs_buildconfig_") as 
bc_file:
                 # print('Getting buildconfig from server and store to %s' % 
bc_filename)
                 bc = get_buildconfig(apiurl, project, repository)
                 bc_file.write(bc)
                 bc_file.flush()
 
-                debug_pkgs = 
decode_it(return_external("/usr/lib/build/queryconfig", "--dist", bc_file.name, 
"substitute", "obs:cli_debug_packages"))
+                debug_pkgs = decode_it(return_external(config.queryconfig_cmd, 
"--dist", bc_file.name, "substitute", "obs:cli_debug_packages"))
                 if debug_pkgs:
                     extra_pkgs.extend(debug_pkgs.strip().split(" "))
 
@@ -7502,6 +7506,7 @@
         from . import build as osc_build
         from . import conf
         from . import store as osc_store
+        from .conf import config
         from .core import Package
         from .core import Repo
         from .core import decode_it
@@ -7516,35 +7521,41 @@
         """helper to parse the repo, arch and build description from args"""
         arg_arch = arg_repository = arg_descr = None
         if len(args) < 3:
-            # some magic, works only sometimes, but people seem to like it :/
-            all_archs = []
-            for mainarch in osc_build.can_also_build:
-                all_archs.append(mainarch)
-                for subarch in osc_build.can_also_build.get(mainarch):
-                    all_archs.append(subarch)
+            # HACK: some magic, works only sometimes, but people seem to like 
it :/
+
+            # gather all keys and values into a single set
+            all_architectures = 
set(itertools.chain(osc_build.can_also_build.keys(), 
*osc_build.can_also_build.values()))
+
             for arg in args:
-                if (arg.endswith('.spec') or arg.endswith('.dsc') or
-                    arg.endswith('.kiwi') or arg.endswith('.livebuild') or
-                    arg.endswith('flatpak.yaml') or 
arg.endswith('flatpak.yml') or
-                    arg.endswith('flatpak.json') or 
arg.startswith('Dockerfile.') or
-                    arg.startswith('Containerfile.') or
-                    arg in ('PKGBUILD', 'build.collax', 'Chart.yaml', 
'Containerfile', 'Dockerfile',
-                            'fissile.yml', 'appimage.yml', 
'_preinstallimage')):
-                    arg_descr = arg
-                else:
-                    if (arg == osc_build.hostarch or arg in all_archs) and 
arg_arch is None:
+                # 1. try if the arg matches a build recipe
+                if not arg_descr:
+                    try:
+                        osc_build.get_build_type_from_recipe_path(arg)
+                        arg_descr = arg
+                        continue
+                    except oscerr.OscValueError:
+                        pass
+
+                # 2. try if the arg is an architecture
+                if not arg_arch:
+                    if arg == osc_build.hostarch or arg in all_architectures:
                         # it seems to be an architecture in general
                         arg_arch = arg
                         if not (arg == osc_build.hostarch or arg in 
osc_build.can_also_build.get(osc_build.hostarch, [])):
-                            if vm_type not in ('qemu', 'emulator'):
-                                print("WARNING: native compile is not 
possible, a emulator via binfmt misc handler must be configured!")
-                    elif not arg_repository:
-                        arg_repository = arg
-                    else:
-                        #  raise oscerr.WrongArgs('\'%s\' is neither a build 
description nor a supported arch' % arg)
-                        # take it as arch (even though this is no supported 
arch) - hopefully, this invalid
-                        # arch will be detected below
-                        arg_arch = arg
+                            if vm_type not in ("qemu", "emulator"):
+                                print("WARNING: native compile is not 
possible, an emulator via binfmt misc handler must be configured!")
+                        continue
+
+                # 3. set repo
+                if not arg_repository:
+                    arg_repository = arg
+                    continue
+
+                # 4. if arg_repository was set already, use the arg as arg_arch
+                if not arg_arch:
+                    arg_arch = arg
+                    continue
+
         else:
             arg_repository, arg_arch, arg_descr = args
 
@@ -7595,15 +7606,7 @@
             if arches and arg_arch not in arches:
                 raise oscerr.WrongArgs(f"{arg_arch} is not a valid arch for 
the repository {arg_repository}, use one of: {', '.join(arches)}")
 
-        # can be implemented using
-        # reduce(lambda x, y: x + y, (glob.glob(x) for x in ('*.spec', 
'*.dsc', '*.kiwi')))
-        # but be a bit more readable :)
-        descr = glob.glob('*.spec') + glob.glob('*.dsc') + glob.glob('*.kiwi') 
+ glob.glob('*.livebuild') + \
-            glob.glob('PKGBUILD') + glob.glob('build.collax') + 
glob.glob('Dockerfile') + \
-            glob.glob('Dockerfile.*') + glob.glob('Containerfile') + 
glob.glob('Containerfile.*') + \
-            glob.glob('fissile.yml') + glob.glob('appimage.yml') + 
glob.glob('Chart.yaml') + \
-            glob.glob('*flatpak.yaml') + glob.glob('*flatpak.yml') + 
glob.glob('*flatpak.json') + \
-            glob.glob('*.productcompose') + glob.glob('mkosi.*')
+        descr = osc_build.find_build_recipes(".")
 
         # FIXME:
         # * request repos from server and select by build type.
@@ -7616,41 +7619,36 @@
                     raise oscerr.WrongArgs("Repository is missing. Cannot 
guess build description without repository")
                 apiurl = self.get_api_url()
                 project = alternative_project or store_read_project('.')
-                # some distros like Debian rename and move build to obs-build
-                if not os.path.isfile('/usr/lib/build/queryconfig') and 
os.path.isfile('/usr/lib/obs-build/queryconfig'):
-                    queryconfig = '/usr/lib/obs-build/queryconfig'
-                else:
-                    queryconfig = '/usr/lib/build/queryconfig'
                 if noinit:
                     bc_filename = f'_buildconfig-{arg_repository}-{arg_arch}'
-                    if is_package_dir('.'):
-                        bc_filename = os.path.join(Path.cwd(), store, 
bc_filename)
+                    if store_obj.is_package:
+                        if hasattr(store_obj, "cache_get_path"):
+                            bc_filename = store_obj.cache_get_path(bc_filename)
+                        else:
+                            bc_filename = os.path.join(Path.cwd(), store, 
bc_filename)
                     else:
                         bc_filename = os.path.abspath(bc_filename)
                     if not os.path.isfile(bc_filename):
                         raise oscerr.WrongOptions('--offline is not possible, 
no local buildconfig file')
-                    recipe = return_external(queryconfig, '--dist', 
bc_filename, 'type')
+                    recipe = return_external(config.queryconfig_cmd, '--dist', 
bc_filename, 'type', encoding="utf-8")
                 else:
                     bc = get_buildconfig(apiurl, project, arg_repository)
                     with tempfile.NamedTemporaryFile() as f:
                         f.write(bc)
                         f.flush()
-                        recipe = return_external(queryconfig, '--dist', 
f.name, 'type')
+                        recipe = return_external(config.queryconfig_cmd, 
'--dist', f.name, 'type', encoding="utf-8")
+
+                # recipe is obtained via queryconfig from _buildconfig
                 recipe = recipe.strip()
-                if recipe == 'arch':
-                    recipe = 'PKGBUILD'
-                recipe = decode_it(recipe)
+                build_type_obj = osc_build.get_build_type(recipe)
+                cands = build_type_obj.get_recipes(Path.cwd())
+
                 pac = os.path.basename(Path.cwd())
                 if is_package_dir(Path.cwd()):
                     pac = store_read_package(Path.cwd())
                 if multibuild_package:
                     pac = multibuild_package
-                if recipe == 'PKGBUILD':
-                    cands = [d for d in descr if d.startswith(recipe)]
-                elif recipe == 'mkosi':
-                    cands = [d for d in descr if d.startswith('mkosi.')]
-                else:
-                    cands = [d for d in descr if d.endswith('.' + recipe)]
+
                 if len(cands) > 1:
                     repo_cands = [d for d in cands if d == 
f'{pac}-{arg_repository}.{recipe}']
                     if repo_cands:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/commands_git/pr_create.py 
new/osc-1.24.0/osc/commands_git/pr_create.py
--- old/osc-1.23.0/osc/commands_git/pr_create.py        2025-12-18 
10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/commands_git/pr_create.py        2026-02-23 
13:35:22.000000000 +0100
@@ -72,6 +72,11 @@
             metavar="BRANCH",
             help="Target branch (default: derived from the current branch in 
local git repo)",
         )
+        self.add_argument(
+            "--self",
+            action="store_true",
+            help="Use the local git repository as the target for the pull 
request",
+        )
 
     def run(self, args):
         from osc import gitea_api
@@ -91,6 +96,9 @@
             local_owner, local_repo = git.get_owner_repo()
             local_branch = git.current_branch
             local_commit = git.get_branch_head(local_branch)
+        
+        if args.self and not use_local_git:
+            self.parser.error("--self can only be used together with local git 
repository (i.e. without --source-owner, --source-repo, --source-branch)")
 
         # remote git repo - source
         if use_local_git:
@@ -104,7 +112,10 @@
         source_repo_obj = gitea_api.Repo.get(self.gitea_conn, source_owner, 
source_repo)
         source_branch_obj = gitea_api.Branch.get(self.gitea_conn, 
source_owner, source_repo, source_branch)
 
-        if args.target_owner:
+        if args.self:
+            target_owner = source_owner
+            target_repo = source_repo
+        elif args.target_owner:
             target_owner = args.target_owner
 
             target_repo = None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/commands_git/pr_dump.py 
new/osc-1.24.0/osc/commands_git/pr_dump.py
--- old/osc-1.23.0/osc/commands_git/pr_dump.py  2025-12-18 10:30:46.000000000 
+0100
+++ new/osc-1.24.0/osc/commands_git/pr_dump.py  2026-02-23 13:35:22.000000000 
+0100
@@ -163,7 +163,7 @@
             )
 
         req = obs_api.Request(
-            id="{owner}/{repo}#{number}",
+            id=f"{owner}/{repo}#{number}",
             title=pr_obj.title,
             description=pr_obj.body,
             creator=pr_obj.user,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/commands_git/repo_fork.py 
new/osc-1.24.0/osc/commands_git/repo_fork.py
--- old/osc-1.23.0/osc/commands_git/repo_fork.py        2025-12-18 
10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/commands_git/repo_fork.py        2026-02-23 
13:35:22.000000000 +0100
@@ -14,6 +14,11 @@
     def init_arguments(self):
         self.add_argument_owner_repo(nargs="+")
         self.add_argument_new_repo_name()
+        self.add_argument(
+            "--target-owner",
+            metavar="OWNER",
+            help="Target owner (default: user's organization)"
+        )
 
     def run(self, args):
         from osc import gitea_api
@@ -24,12 +29,18 @@
         if len(args.owner_repo) > 1 and args.new_repo_name:
             self.parser.error("The --new-repo-name option cannot be used with 
multiple repos")
 
+        target_owner = None
+        if args.target_owner:
+            target_owner = args.target_owner
+
         num_entries = 0
         failed_entries = []
         for owner, repo in args.owner_repo:
             print(f"Forking git repo {owner}/{repo} ...", file=sys.stderr)
             try:
-                repo_obj = gitea_api.Fork.create(self.gitea_conn, owner, repo, 
new_repo_name=args.new_repo_name)
+                repo_obj = gitea_api.Fork.create(
+                    self.gitea_conn, owner, repo,
+                    new_repo_name=args.new_repo_name, target_org=target_owner)
                 fork_owner = repo_obj.owner
                 fork_repo = repo_obj.repo
                 print(f" * Fork created: {fork_owner}/{fork_repo}", 
file=sys.stderr)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/conf.py new/osc-1.24.0/osc/conf.py
--- old/osc-1.23.0/osc/conf.py  2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/conf.py  2026-02-23 13:35:22.000000000 +0100
@@ -1320,6 +1320,18 @@
         ini_key="queryrecipe-cmd",
     )  # type: ignore[assignment]
 
+    queryconfig_cmd: str = Field(
+        default=
+            shutil.which("queryconfig", 
path="/usr/bin:/usr/lib/build:/usr/lib/obs-build")
+            or "/usr/lib/build/queryconfig",
+        description=textwrap.dedent(
+            """
+            Path to the 'queryconfig' tool.
+            """
+        ),
+        ini_key="queryconfig-cmd",
+    )  # type: ignore[assignment]
+
     obs_scm_bridge_cmd: str = Field(
         default=
             shutil.which("obs_scm_bridge", path="/usr/lib/obs/service")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/connection.py 
new/osc-1.24.0/osc/connection.py
--- old/osc-1.23.0/osc/connection.py    2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/connection.py    2026-02-23 13:35:22.000000000 +0100
@@ -1,4 +1,6 @@
 import base64
+import contextlib
+import datetime
 import fcntl
 import inspect
 import os
@@ -32,6 +34,23 @@
 warnings.filterwarnings("once", 
category=urllib3.exceptions.InsecureRequestWarning)
 
 
[email protected]
+def debug_timer(msg="The request took"):
+    if not int(conf.config["http_debug"]) and not 
int(conf.config["http_full_debug"]):
+        yield
+        return
+
+    start_perf = time.perf_counter()
+    try:
+        yield
+    finally:
+        end_perf = time.perf_counter()
+        duration = end_perf - start_perf
+
+        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        print(f"[{timestamp}] DEBUG:", msg, f"{duration:.4f}s", 
file=sys.stderr)
+
+
 class MockRequest:
     """
     Mock a request object for `cookiejar.extract_cookies()`
@@ -86,7 +105,8 @@
                 # (?<=...) - '...' must be present before the pattern 
(positive lookbehind assertion)
                 args[1] = re.sub(r"(?<=\\r\\n)authorization:.*?\\r\\n", "", 
args[1], re.I)
                 args[1] = re.sub(r"(?<=\\r\\n)Cookie:.*?\\r\\n", "", args[1], 
re.I)
-        print("DEBUG:", *args, file=sys.stderr)
+        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        print(f"[{timestamp}] DEBUG:", *args, file=sys.stderr)
     http.client.print = new_print
 
 
@@ -211,7 +231,8 @@
             # direct connection
             manager = POOL_MANAGER
 
-        response = manager.urlopen(method, url, body=data, headers=headers, 
preload_content=False)
+        with debug_timer():
+            response = manager.urlopen(method, url, body=data, 
headers=headers, preload_content=False)
 
         if response.status / 100 != 2:
             raise urllib.error.HTTPError(url, response.status, 
response.reason, response.headers, response)
@@ -343,10 +364,11 @@
         http.client.print(method, url)
 
     try:
-        response = pool.urlopen(
-            method, urlopen_url, body=data, headers=headers,
-            preload_content=False, assert_same_host=assert_same_host
-        )
+        with debug_timer():
+            response = pool.urlopen(
+                method, urlopen_url, body=data, headers=headers,
+                preload_content=False, assert_same_host=assert_same_host
+            )
     except urllib3.exceptions.MaxRetryError as e:
         if not isinstance(e.reason, urllib3.exceptions.SSLError):
             # re-raise exceptions that are not related to SSL
@@ -374,10 +396,11 @@
         if hasattr(data, 'seek'):
             data.seek(0)
 
-        response = pool.urlopen(
-            method, urlopen_url, body=data, headers=headers,
-            preload_content=False, assert_same_host=assert_same_host
-        )
+        with debug_timer():
+            response = pool.urlopen(
+                method, urlopen_url, body=data, headers=headers,
+                preload_content=False, assert_same_host=assert_same_host
+            )
 
     if response.status == 401:
         # session cookie has expired, re-authenticate
@@ -387,7 +410,8 @@
                 break
         if hasattr(data, 'seek'):
             data.seek(0)
-        response = pool.urlopen(method, urlopen_url, body=data, 
headers=headers, preload_content=False)
+        with debug_timer():
+            response = pool.urlopen(method, urlopen_url, body=data, 
headers=headers, preload_content=False)
 
     # we want to save a session cookie before an exception is raised on failed 
requests
     for handler in auth_handlers:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/core.py new/osc-1.24.0/osc/core.py
--- old/osc-1.23.0/osc/core.py  2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/core.py  2026-02-23 13:35:22.000000000 +0100
@@ -101,56 +101,52 @@
 BUFSIZE = 1024 * 1024
 
 
+# openSUSE Leap repository naming was discussed and decided here:
+# 
https://lists.opensuse.org/archives/list/[email protected]/thread/RIUQNUEQ2SQ2UT4Q3RIJVHISUY3IAJGV/
 new_project_templ = """\
 <project name="%(name)s">
 
-  <title></title> <!-- Short title of NewProject -->
+  <!-- Short title of the project. -->
+  <title></title>
+
+  <!-- Longer description of the purpose of the project. -->
   <description></description>
-    <!-- This is for a longer description of the purpose of the project -->
 
-  <!-- Uncomment and specify an URL and branch if your project is managed in 
git.
+  <!-- Uncomment and specify a URL and branch if your project is managed in 
Git.
   <scmsync>url#branch</scmsync>
   -->
 
   <person role="maintainer" userid="%(user)s" />
   <person role="bugowner" userid="%(user)s" />
-<!-- remove this block to publish your packages on the mirrors -->
+
   <publish>
+    <!-- Change to 'enable' to publish your packages on the mirrors. -->
     <disable />
   </publish>
+
   <build>
     <enable />
   </build>
+
   <debuginfo>
     <enable />
   </debuginfo>
 
-<!-- remove this comment to enable one or more build targets
+<!--
+  Uncomment to enable one or more build targets.
+  Add corresponding i586 entries if your packages use baselibs.conf and 
produce -32bit packages, or if you want i586 builds.
 
   <repository name="openSUSE_Factory">
     <path project="openSUSE:Factory" repository="snapshot" />
     <arch>x86_64</arch>
-    <arch>i586</arch>
-  </repository>
-  <repository name="openSUSE_13.2">
-    <path project="openSUSE:13.2" repository="standard"/>
-    <arch>x86_64</arch>
-    <arch>i586</arch>
   </repository>
-  <repository name="openSUSE_13.1">
-    <path project="openSUSE:13.1" repository="standard"/>
+  <repository name="16.0">
+    <path project="openSUSE:Leap:16.0" repository="standard"/>
     <arch>x86_64</arch>
-    <arch>i586</arch>
   </repository>
-  <repository name="Fedora_21">
-    <path project="Fedora:21" repository="standard" />
+  <repository name="Fedora_Rawhide">
+    <path project="Fedora:Rawhide" repository="standard" />
     <arch>x86_64</arch>
-    <arch>i586</arch>
-  </repository>
-  <repository name="SLE_12">
-    <path project="SUSE:SLE-12:GA" repository="standard" />
-    <arch>x86_64</arch>
-    <arch>i586</arch>
   </repository>
 -->
 
@@ -182,8 +178,8 @@
   or a combination thereof:
 
   <disable arch="x86_64"/>
-  <disable repository="SUSE_SLE-10"/>
-  <disable repository="SUSE_SLE-10" arch="x86_64"/>
+  <disable repository="Fedora_Rawhide"/>
+  <disable repository="Fedora_Rawhide" arch="x86_64"/>
 
   Possible sections where you can use the tags above:
   <build>
@@ -2459,7 +2455,7 @@
 
         # post-process results until we switch back to the /search/request
         # which seems to be more suitable for such queries
-        exclude = False
+        match = False
         for action in r.actions:
             src_project = getattr(action, "src_project", None)
             src_package = getattr(action, "src_package", None)
@@ -2468,27 +2464,28 @@
 
             # skip if neither of source and target project matches
             if "project" in query and query["project"] not in (src_project, 
tgt_project):
-                exclude = True
-                break
+                continue
 
             # skip if neither of source and target package matches
             if "package" in query and query["package"] not in (src_package, 
tgt_package):
-                exclude = True
-                break
+                continue
 
             if not conf.config["include_request_from_project"]:
+                # skip if include_request_from_project=0 and the query matches 
the source prj/pac
                 if "project" in query and "package" in query:
                     if (src_project, src_package) == (query["project"], 
query["package"]):
-                        exclude = True
-                        break
+                        continue
+                # skip if include_request_from_project=0 and the query matches 
the source prj
                 elif "project" in query:
                     if src_project == query["project"]:
-                        exclude = True
-                        break
-        if exclude:
-            continue
+                        continue
+
+            match = True
+            break
+
+        if match:
+            requests.append(r)
 
-        requests.append(r)
     return requests
 
 
@@ -3568,7 +3565,10 @@
 
     if meta_change:
         src_meta = show_package_meta(apiurl, src_project, src_package_meta)
-        dst_meta = replace_pkg_meta(src_meta, dst_package_meta, dst_project)
+        # if we're creating a new package by copying meta, we strip scmsync 
because:
+        # - we only care about aggregating binaries
+        # - preserving scmsync would prevent us from creating the _aggregate 
file
+        dst_meta = replace_pkg_meta(src_meta, dst_package_meta, dst_project, 
keep_scmsync=False)
         meta_change = True
 
     if disable_publish:
@@ -3850,7 +3850,7 @@
         if not any([expand, revision]):
             raise oscerr.OscValueError("Cannot copy package. Source and target 
are the same.")
 
-    meta = None
+    meta = new_meta = src_meta = None
     if not (src_apiurl == dst_apiurl and src_project == dst_project
             and src_package == dst_package):
         src_meta = show_package_meta(src_apiurl, src_project, src_package)
@@ -3865,17 +3865,28 @@
         except HTTPError as e:
             pass
         if force_meta_update or not found:
-            print('Sending meta data...')
-            u = makeurl(dst_apiurl, ['source', dst_project, dst_package, 
'_meta'])
-            http_PUT(u, data=meta)
-
-    if meta is None:
-        meta = show_files_meta(dst_apiurl, dst_project, dst_package)
-
-    root = xml_fromstring(meta)
-    if root.find("scmsync") is not None:
-        print("Note: package source is managed via SCM")
-        return
+            new_meta = meta
+
+    if new_meta:
+        # we are about to create a new package instance. be sure we don't 
blindly copy scm sources
+        # instead hinting the user to think about its setup.
+        root = xml_fromstring(b''.join(src_meta))
+        if root.find("scmsync") is not None:
+            print("Note: the source is managed via SCM. You may want to 
reference directly to the same scm instead?")
+            return
+    else:
+        # destination exists, we copy from any source, but avoid the backend 
error
+        # when trying to copy on an scmsync package.
+        dst_meta = show_package_meta(dst_apiurl, dst_project, dst_package)
+        root = xml_fromstring(b''.join(dst_meta))
+        if root.find("scmsync") is not None:
+            print("Note: package source in target is managed via SCM")
+            return
+
+    if new_meta:
+        print('Sending meta data...')
+        u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
+        http_PUT(u, data=new_meta)
 
     print('Copying files...')
     if not client_side_copy:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/output/output.py 
new/osc-1.24.0/osc/output/output.py
--- old/osc-1.23.0/osc/output/output.py 2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/osc/output/output.py 2026-02-23 13:35:22.000000000 +0100
@@ -1,3 +1,4 @@
+import datetime
 import os
 import platform
 import re
@@ -32,7 +33,8 @@
     elif print_to == "debug":
         # print a debug message to stderr if config["debug"] is set
         if conf.config["debug"]:
-            print("DEBUG:", *args, file=sys.stderr)
+            timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            print(f"[{timestamp}] DEBUG:", *args, file=sys.stderr)
     elif print_to == "verbose":
         # print a verbose message to stdout if config["verbose"] or 
config["debug"] is set
         if conf.config["verbose"] or conf.config["debug"]:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/osc/util/git_version.py 
new/osc-1.24.0/osc/util/git_version.py
--- old/osc-1.23.0/osc/util/git_version.py      2025-12-18 10:30:46.000000000 
+0100
+++ new/osc-1.24.0/osc/util/git_version.py      2026-02-23 13:35:22.000000000 
+0100
@@ -9,7 +9,7 @@
     """
     # the `version` variable contents get substituted during `git archive`
     # it requires adding this to .gitattributes: <path to this file> 
export-subst
-    version = "1.23.0"
+    version = "1.24.0"
     if version.startswith(("$", "%")):
         # "$": version hasn't been substituted during `git archive`
         # "%": "Format:" and "$" characters get removed from the version 
string (a GitHub bug?)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/osc-1.23.0/tests/test_output.py 
new/osc-1.24.0/tests/test_output.py
--- old/osc-1.23.0/tests/test_output.py 2025-12-18 10:30:46.000000000 +0100
+++ new/osc-1.24.0/tests/test_output.py 2026-02-23 13:35:22.000000000 +0100
@@ -100,8 +100,7 @@
         stderr = io.StringIO()
         with contextlib.redirect_stdout(stdout), 
contextlib.redirect_stderr(stderr):
             print_msg("foo", "bar", print_to="debug")
-        self.assertEqual("", stdout.getvalue())
-        self.assertEqual("DEBUG: foo bar\n", stderr.getvalue())
+        self.assertRegex(stderr.getvalue(), r"^\[....-..-.. ..:..:..\] DEBUG: 
foo bar\n", stderr.getvalue())
 
     def test_verbose(self):
         osc.conf.config["verbose"] = False

++++++ osc.dsc ++++++
--- /var/tmp/diff_new_pack.Dm68z1/_old  2026-02-23 16:15:26.704497254 +0100
+++ /var/tmp/diff_new_pack.Dm68z1/_new  2026-02-23 16:15:26.708497419 +0100
@@ -1,6 +1,6 @@
 Format: 1.0
 Source: osc
-Version: 1.23.0-0
+Version: 1.24.0-0
 Binary: osc
 Maintainer: Adrian Schroeter <[email protected]>
 Architecture: any

Reply via email to