Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package kapidox for openSUSE:Factory checked in at 2022-05-16 18:07:46
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/kapidox (Old)
 and      /work/SRC/openSUSE:Factory/.kapidox.new.1538 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "kapidox"

Mon May 16 18:07:46 2022 rev:102 rq:977147 version:5.94.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/kapidox/kapidox.changes  2022-04-11 23:49:52.358639838 +0200
+++ /work/SRC/openSUSE:Factory/.kapidox.new.1538/kapidox.changes        2022-05-16 18:10:01.937350459 +0200
@@ -1,0 +2,12 @@
+Tue May 10 08:17:45 UTC 2022 - Christophe Giboudeaux <christo...@krop.fr>
+
+- Update to 5.94.0
+  * New feature release
+  * For more details please see:
+  * https://kde.org/announcements/frameworks/5/5.94.0
+- Changes since 5.93.0:
+  * only bootstrap when explicitly asked for
+  * General code style cleanup
+  * Add a dummy install part to cmake
+
+-------------------------------------------------------------------

Old:
----
  kapidox-5.93.0.tar.xz
  kapidox-5.93.0.tar.xz.sig

New:
----
  kapidox-5.94.0.tar.xz
  kapidox-5.94.0.tar.xz.sig

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ kapidox.spec ++++++
--- /var/tmp/diff_new_pack.jKjll8/_old  2022-05-16 18:10:02.505350911 +0200
+++ /var/tmp/diff_new_pack.jKjll8/_new  2022-05-16 18:10:02.509350914 +0200
@@ -18,13 +18,13 @@
 
 # Only needed for the package signature condition
 %bcond_without released
-%define _tar_path 5.93
+%define _tar_path 5.94
 # Full KF5 version (e.g. 5.33.0)
 %{!?_kf5_version: %global _kf5_version %{version}}
 # Last major and minor KF5 version (e.g. 5.33)
 %{!?_kf5_bugfix_version: %define _kf5_bugfix_version %(echo %{_kf5_version} | awk -F. '{print $1"."$2}')}
 Name:           kapidox
-Version:        5.93.0
+Version:        5.94.0
 Release:        0
 Summary:        Scripts and data for building API documentation
 License:        BSD-2-Clause
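
For reference, the _kf5_bugfix_version macro kept above strips the patch level from the full KF5 version with awk. Evaluated against the new Version: tag of this update, the shell expression inside %(...) behaves like this (a quick illustration, nothing assumed beyond the spec file itself):

  $ echo 5.94.0 | awk -F. '{print $1"."$2}'
  5.94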


++++++ kapidox-5.93.0.tar.xz -> kapidox-5.94.0.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/CMakeLists.txt 
new/kapidox-5.94.0/CMakeLists.txt
--- old/kapidox-5.93.0/CMakeLists.txt   2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/CMakeLists.txt   2022-04-19 11:14:35.000000000 +0200
@@ -1,17 +1,21 @@
 cmake_minimum_required(VERSION 3.16)
 project(KApiDox NONE)
 
-include(FindUnixCommands)
+if(DO_BOOTSTRAP)
+    include(FindUnixCommands)
 
-if(WIN32)
-    # Needs native path and extra escaping of spaces
-    file(TO_NATIVE_PATH "${CMAKE_BINARY_DIR}" BINARY_DIR)
-else()
-    set(BINARY_DIR ${CMAKE_BINARY_DIR})
-endif()
+    if(WIN32)
+        # Needs native path and extra escaping of spaces
+        file(TO_NATIVE_PATH "${CMAKE_BINARY_DIR}" BINARY_DIR)
+    else()
+        set(BINARY_DIR ${CMAKE_BINARY_DIR})
+    endif()
 
-add_custom_target(build ALL
-    COMMAND ${BASH} bootstrap-devenv.sh
-    WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
-)
+    add_custom_target(build ALL
+        COMMAND ${BASH} bootstrap-devenv.sh
+        WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
+    )
+endif()
 
+# A dummy install part to fulfill needs of kdesrc-build and possible make installs
+install(RUNTIME_DEPENDENCY_SET kapidox OPTIONAL)
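
This CMakeLists.txt change implements the "only bootstrap when explicitly asked for" and "Add a dummy install part to cmake" entries from the changelog: the target that runs bootstrap-devenv.sh is now created only when DO_BOOTSTRAP is set, and a dummy install() rule keeps kdesrc-build and plain 'make install' runs happy. A minimal sketch of opting back into the bootstrap behaviour; only the DO_BOOTSTRAP option name comes from the patch, the -S/-B invocation style is an assumption:

  # configure with the bootstrap target enabled, then build it
  $ cmake -S kapidox -B build -DDO_BOOTSTRAP=ON
  $ cmake --build build    # the 'build' ALL target runs bootstrap-devenv.sh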
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/argparserutils.py 
new/kapidox-5.94.0/kapidox/argparserutils.py
--- old/kapidox-5.93.0/kapidox/argparserutils.py        2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/argparserutils.py        2022-04-19 11:14:35.000000000 +0200
@@ -21,13 +21,13 @@
         description=textwrap.dedent('''Generate API documentation of complex 
projects.
 
 >> This function must be run from an empty directory (where the documentation 
 >> will be built).''')
-        )
+    )
     group = add_sources_group(parser)
     group.add_argument('sourcesdir', type=normalized_path,
-            help='Location of the sources.')
+                       help='Location of the sources.')
     group.add_argument('--depdiagram-dot-dir', type=normalized_path,
-            help='Generate dependency diagrams, using the .dot files from 
DIR.',
-            metavar="DIR")
+                       help='Generate dependency diagrams, using the .dot 
files from DIR.',
+                       metavar="DIR")
     add_output_group(parser)
     add_qt_doc_group(parser)
     add_paths_group(parser)
@@ -51,43 +51,43 @@
 def add_sources_group(parser):
     group = parser.add_argument_group('sources')
     group.add_argument('--accountsfile', type=normalized_path,
-            help='File with accounts information of SVN contributors.')
+                       help='File with accounts information of SVN 
contributors.')
     return group
 
 
 def add_output_group(parser):
     group = parser.add_argument_group('output options')
     group.add_argument('--title', default='API Documentation',
-            help='String to use for page titles.')
+                       help='String to use for page titles.')
     group.add_argument('--man-pages', action='store_true',
-            help='Generate man page documentation.')
+                       help='Generate man page documentation.')
     group.add_argument('--qhp', action='store_true',
-            help='Generate Qt Compressed Help documentation.')
+                       help='Generate Qt Compressed Help documentation.')
     return group
 
 
 def add_qt_doc_group(parser):
     group = parser.add_argument_group('Qt documentation')
     group.add_argument('--qtdoc-dir', type=normalized_path,
-            help='Location of (local) Qt documentation; this is searched ' +
-                 'for tag files to create links to Qt classes.')
+                       help='Location of (local) Qt documentation; this is 
searched ' +
+                            'for tag files to create links to Qt classes.')
     group.add_argument('--qtdoc-link',
-            help='Override Qt documentation location for the links in the ' +
-                 'html files.  May be a path or URL.')
+                       help='Override Qt documentation location for the links 
in the ' +
+                            'html files.  May be a path or URL.')
     group.add_argument('--qtdoc-flatten-links', action='store_true',
-            help='Whether to assume all Qt documentation html files ' +
-                 'are immediately under QTDOC_LINK (useful if you set ' +
-                 'QTDOC_LINK to the online Qt documentation).  Ignored ' +
-                 'if QTDOC_LINK is not set.')
+                       help='Whether to assume all Qt documentation html files 
' +
+                            'are immediately under QTDOC_LINK (useful if you 
set ' +
+                            'QTDOC_LINK to the online Qt documentation).  
Ignored ' +
+                            'if QTDOC_LINK is not set.')
     return group
 
 
 def add_paths_group(parser):
     group = parser.add_argument_group('paths')
     group.add_argument('--doxygen', default='doxygen', type=normalized_path,
-            help='(Path to) the doxygen executable.')
+                       help='(Path to) the doxygen executable.')
     group.add_argument('--qhelpgenerator', default='qhelpgenerator', 
type=normalized_path,
-            help='(Path to) the qhelpgenerator executable.')
+                       help='(Path to) the qhelpgenerator executable.')
     return group
 
 
@@ -97,15 +97,15 @@
 
     group = parser.add_argument_group('misc')
     group.add_argument('--doxdatadir', default=doxdatadir, 
type=normalized_path,
-            help='Location of the HTML header files and support graphics.')
+                       help='Location of the HTML header files and support 
graphics.')
     group.add_argument('--keep-temp-dirs', action='store_true',
-            help='Do not delete temporary dirs, useful for debugging.')
+                       help='Do not delete temporary dirs, useful for 
debugging.')
     return parser
 
 
 def check_common_args(args):
     if not _is_doxdatadir(args.doxdatadir):
-        logging.error("{} is not a valid doxdatadir".format(args.doxdatadir))
+        logging.error(f'{args.doxdatadir} is not a valid doxdatadir')
         sys.exit(1)
 
 
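
The argparserutils.py hunks are mostly indentation and string-formatting cleanups of the argparse declarations, so the command-line interface is unchanged. For orientation, a sketch of an invocation exercising the options declared in this file; the executable name and paths are placeholders (see setup.py for the installed entry point), only the flags come from the code above:

  $ mkdir apidocs && cd apidocs    # the tool expects an empty working directory
  $ kapidox-generate ~/src/frameworks --title "KDE Frameworks API" --qhp \
        --depdiagram-dot-dir /tmp/dotfiles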
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/depdiagram_generate.py 
new/kapidox-5.94.0/kapidox/depdiagram_generate.py
--- old/kapidox-5.93.0/kapidox/depdiagram_generate.py   2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/depdiagram_generate.py   2022-04-19 11:14:35.000000000 +0200
@@ -18,16 +18,16 @@
     parser = argparse.ArgumentParser(description=DESCRIPTION)
 
     parser.add_argument("-o", "--output", dest="output", default="-",
-        help="Output to FILE", metavar="FILE")
+                        help="Output to FILE", metavar="FILE")
 
     parser.add_argument("--qt", dest="qt", action="store_true",
-        help="Show Qt libraries")
+                        help="Show Qt libraries")
 
     parser.add_argument("--detailed", dest="detailed", action="store_true",
-        help="Show targets within frameworks")
+                        help="Show targets within frameworks")
 
     parser.add_argument("--framework", dest="framework",
-        help="Only show dependencies of framework FRAMEWORK", 
metavar="FRAMEWORK")
+                        help="Only show dependencies of framework FRAMEWORK", 
metavar="FRAMEWORK")
 
     parser.add_argument("dot_files", nargs="+")
 
@@ -43,6 +43,7 @@
     else:
         return 1
 
+
 if __name__ == "__main__":
     sys.exit(main())
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/depdiagram_generate_all 
new/kapidox-5.94.0/kapidox/depdiagram_generate_all
--- old/kapidox-5.93.0/kapidox/depdiagram_generate_all  2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/depdiagram_generate_all  2022-04-19 11:14:35.000000000 +0200
@@ -5,9 +5,9 @@
 # SPDX-License-Identifier: BSD-2-Clause
 
 oldpwd=$PWD
-cd $(dirname $0)
+cd "$(dirname "$0")"
 bindir=$PWD
-cd $oldpwd
+cd "$oldpwd"
 generate=$bindir/depdiagram-generate
 
 die() {
@@ -20,7 +20,7 @@
 fi
 
 if [ "$#" -ne 2 ] ; then
-    die "Usage: $(basename $0) <dot_dir> <png_dir>"
+    die "Usage: $(basename "$0") <dot_dir> <png_dir>"
 fi
 
 dot_dir=$1
@@ -31,42 +31,42 @@
 fi
 
 read_tier() {
-    sed --quiet '/tier:/s/tier: *//p' $1
+    sed --quiet '/tier:/s/tier: *//p' "$1"
 }
 
 gen_fws() {
     tmp_dot=/tmp/diagram.dot
 
     for fw_dot in $fw_dots ; do
-        fw=$(basename $fw_dot .dot)
+        fw=$(basename "$fw_dot" .dot)
         echo "$fw"
 
         # Gather info
-        tier=$(read_tier $(echo $fw_dot | sed 's/dot$/yaml/'))
-        if [ $tier -lt 3 ] ; then
+        tier=$(read_tier "$(echo "$fw_dot" | sed 's/dot$/yaml/')")
+        if [ "$tier" -lt 3 ] ; then
             opts=--qt
         else
             opts=""
         fi
 
         # Generate .dot file
-        $generate $fw_dots --framework $fw $opts > $tmp_dot
+        $generate "$fw_dots" --framework "$fw" "$opts" > $tmp_dot
 
         # Turn .dot into a .png
-        dot -Tpng $tmp_dot > $png_dir/$fw.png
+        dot -Tpng $tmp_dot > "$png_dir"/"$fw".png
 
         # Simplify .dot and turn it into a .png
-        tred $tmp_dot | dot -Tpng > $png_dir/$fw-simplified.png
+        tred $tmp_dot | dot -Tpng > "$png_dir"/"$fw"-simplified.png
     done
 
     rm $tmp_dot
 }
 
-mkdir -p $png_dir
+mkdir -p "$png_dir"
 
-fw_dots=$(find $dot_dir -name '*.dot')
+fw_dots=$(find "$dot_dir" -name '*.dot')
 
 gen_fws
 
 echo "kf5"
-$generate $fw_dots | tred | dot -Tpng > $png_dir/kf5.png
+$generate "$fw_dots" | tred | dot -Tpng > "$png_dir"/kf5.png
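
The depdiagram_generate_all changes above are shell quoting fixes: expansions of $0, $1 and the path variables are now double-quoted so the script survives checkout paths that contain spaces. A minimal sketch of the failure mode being addressed, using a hypothetical path:

  # hypothetical checkout path containing a space
  script="/home/user/kde src/depdiagram_generate_all"
  cd $(dirname "$script")      # unquoted substitution splits "/home/user/kde src" into two words; cd fails or lands in the wrong directory
  cd "$(dirname "$script")"    # quoted: the whole directory name reaches cd as a single argument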
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/depdiagram_prepare.py 
new/kapidox-5.94.0/kapidox/depdiagram_prepare.py
--- old/kapidox-5.93.0/kapidox/depdiagram_prepare.py    2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/depdiagram_prepare.py    2022-04-19 11:14:35.000000000 +0200
@@ -20,6 +20,7 @@
 Generate Graphviz dot files for one or all frameworks.
 """
 
+
 def generate_dot(fw_dir, fw_name, output_dir):
     """Calls cmake to generate the dot file for a framework.
 
@@ -28,8 +29,8 @@
     build_dir = tempfile.mkdtemp(prefix="depdiagram-prepare-build-")
     try:
         ret = subprocess.call(["cmake", fw_dir, 
"--graphviz={}".format(dot_path)],
-            stdout=open("/dev/null", "w"),
-            cwd=build_dir)
+                              stdout=open("/dev/null", "w"),
+                              cwd=build_dir)
         if ret != 0:
             if os.path.exists(dot_path):
                 os.remove(dot_path)
@@ -53,7 +54,7 @@
 
     yaml_path = os.path.join(fw_dir, "metainfo.yaml")
     if not os.path.exists(yaml_path):
-        logging.error("'{}' is not a framework: '{}' does not 
exist.".format(fw_dir, yaml_path))
+        logging.error(f"'{fw_dir}' is not a framework: '{yaml_path}' does not 
exist.")
         return False
 
     if not os.path.exists(output_dir):
@@ -79,7 +80,7 @@
             continue
 
         progress = int(100 * (idx + 1) / len(lst))
-        print("{}% {}".format(progress, fw_name))
+        print(f'{progress}% {fw_name}')
         if not prepare_one(fw_dir, dot_dir):
             fails.append(fw_name)
     return fails
@@ -90,13 +91,13 @@
 
     group = parser.add_mutually_exclusive_group(required=True)
     group.add_argument("-s", "--single",
-        help="Generate dot files for the framework stored in DIR",
-        metavar="DIR")
+                       help="Generate dot files for the framework stored in 
DIR",
+                       metavar="DIR")
     group.add_argument("-a", "--all",
-        help="Generate dot files for all frameworks whose dir is in BASE_DIR",
-        metavar="BASE_DIR")
+                       help="Generate dot files for all frameworks whose dir 
is in BASE_DIR",
+                       metavar="BASE_DIR")
     parser.add_argument("dot_dir",
-        help="Destination dir where dot files will be generated")
+                        help="Destination dir where dot files will be 
generated")
 
     args = parser.parse_args()
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/doxyfilewriter.py 
new/kapidox-5.94.0/kapidox/doxyfilewriter.py
--- old/kapidox-5.93.0/kapidox/doxyfilewriter.py        2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/doxyfilewriter.py        2022-04-19 11:14:35.000000000 +0200
@@ -9,6 +9,7 @@
 def _quote(txt):
     return '"' + txt + '"'
 
+
 class DoxyfileWriter(object):
     """Makes it easy to write entries in a Doxyfile, correctly handling quoting
     """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/generator.py 
new/kapidox-5.94.0/kapidox/generator.py
--- old/kapidox-5.93.0/kapidox/generator.py     2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/generator.py     2022-04-19 11:14:35.000000000 +0200
@@ -304,7 +304,7 @@
         A list of pairs of (tag_file,link_path)
     """
 
-    if not suggestion is None:
+    if suggestion is not None:
         if not os.path.isdir(suggestion):
             logging.warning(suggestion + " is not a directory")
         else:
@@ -387,7 +387,7 @@
     for example if the documentation contains raw HTML.
 
     The key/value block is kept in a comment so that it does not appear in Qt
-    Compressed Help output, which is not postprocessed by ourself.
+    Compressed Help output, which is not post processed by ourself.
     """
     dct = {}
     body = []
@@ -647,8 +647,7 @@
         try:
             subprocess.check_call(cmd, **kwargs)
         except subprocess.CalledProcessError as exc:
-            logging.error('Command {exc.cmd} failed with error code {}.'
-                          .format(exc.returncode))
+            logging.error(f'Command {exc.cmd} failed with error code 
{exc.returncode}.')
             return False
         return True
 
@@ -736,7 +735,7 @@
         prefix = '../../'
     else:
         prefix = '../../'
-    return (tagfile, prefix + lib.outputdir + '/html/')
+    return tagfile, prefix + lib.outputdir + '/html/'
 
 
 def finish_fw_apidocs_doxygen(ctx: Context, env: Dict[str, Any]):
@@ -810,7 +809,7 @@
           <field name="keywords"></field>
           <field name="text"></field>
         </doc>
-      </add
+      </add>
     """
 
     doclist = []
@@ -821,7 +820,7 @@
             if child.attrib['name'] == "type":
                 if child.text == 'source':
                     field = None
-                    break; # We go to next <doc>
+                    break  # We go to next <doc>
                 field['type'] = child.text
             elif child.attrib['name'] == "name":
                 field['name'] = child.text
@@ -878,7 +877,7 @@
             prodindex = json.load(f)
             for proditem in prodindex['libraries']:
                 for item in proditem['docfields']:
-                    item['url'] = product.name + '/' + item['url']
+                    item['url'] = os.path.join(product.name, item['url'])
             doclist.append(prodindex)
 
     indexdic = {
@@ -892,7 +891,7 @@
 def create_qch(products, tagfiles):
     tag_root = "QtHelpProject"
     tag_files = "files"
-    tag_filterSection = "filterSection"
+    tag_filter_section = "filterSection"
     tag_keywords = "keywords"
     tag_toc = "toc"
     for product in products:
@@ -901,17 +900,17 @@
         root_out.set("version", "1.0")
         namespace = ET.SubElement(root_out, "namespace")
         namespace.text = "org.kde." + product.name
-        virtualFolder = ET.SubElement(root_out, "virtualFolder")
-        virtualFolder.text = product.name
-        filterSection = ET.SubElement(root_out, tag_filterSection)
-        filterAttribute = ET.SubElement(filterSection, "filterAttribute")
-        filterAttribute.text = "doxygen"
-        toc = ET.SubElement(filterSection, "toc")
-        keywords = ET.SubElement(filterSection, tag_keywords)
+        virtual_folder = ET.SubElement(root_out, "virtualFolder")
+        virtual_folder.text = product.name
+        filter_section = ET.SubElement(root_out, tag_filter_section)
+        filter_attribute = ET.SubElement(filter_section, "filterAttribute")
+        filter_attribute.text = "doxygen"
+        toc = ET.SubElement(filter_section, "toc")
+        keywords = ET.SubElement(filter_section, tag_keywords)
         if len(product.libraries) > 0:
             if product.libraries[0].part_of_group:
-                product_indexSection = ET.SubElement(toc, "section", {'ref': 
product.name + "/index.html", 'title': product.fancyname})
-        files = ET.SubElement(filterSection, tag_files)
+                product_index_section = ET.SubElement(toc, "section", {'ref': 
product.name + "/index.html", 'title': product.fancyname})
+        files = ET.SubElement(filter_section, tag_files)
 
         for lib in sorted(product.libraries, key=lambda lib: lib.name):
             tree = ET.parse(lib.outputdir + '/html/index.qhp')
@@ -921,11 +920,11 @@
                     child.attrib['ref'] = lib.name + "/html/" + 
child.attrib['ref']
                 else:
                     child.attrib['ref'] = "html/" + child.attrib['ref']
-                child.attrib['ref'] = product.name + '/' +child.attrib['ref']
+                child.attrib['ref'] = product.name + '/' + child.attrib['ref']
 
             for child in root.find(".//"+tag_toc):
                 if lib.part_of_group:
-                    product_indexSection.append(child)
+                    product_index_section.append(child)
                 else:
                     toc.append(child)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/hlfunctions.py 
new/kapidox-5.94.0/kapidox/hlfunctions.py
--- old/kapidox-5.93.0/kapidox/hlfunctions.py   2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/hlfunctions.py   2022-04-19 11:14:35.000000000 +0200
@@ -20,6 +20,7 @@
 
 try:
     from kapidox import depdiagram
+
     DEPDIAGRAM_AVAILABLE = True
 except ImportError:
     DEPDIAGRAM_AVAILABLE = False
@@ -28,7 +29,7 @@
 def do_it(maintainers_fct, copyright, searchpaths=None):
     utils.setup_logging()
     if searchpaths is None:
-        searchpaths = searchpaths=['/usr/share/doc/qt5', '/usr/share/doc/qt']
+        searchpaths = searchpaths = ['/usr/share/doc/qt5', '/usr/share/doc/qt']
     args = argparserutils.parse_args(DEPDIAGRAM_AVAILABLE)
 
     if len(os.listdir(os.getcwd())) > 0:
@@ -55,34 +56,34 @@
     if os.path.isdir(dirdest):
         shutil.rmtree(dirdest)
     shutil.copytree(dirsrc, dirdest)
-    os.rename(dirdest+'/favicon.ico', './favicon.ico')
-    os.rename(dirdest+'/worker.js', './worker.js')
+    os.rename(dirdest + '/favicon.ico', './favicon.ico')
+    os.rename(dirdest + '/worker.js', './worker.js')
 
     generator.process_toplevel_html_file('index.html',
-                               args.doxdatadir,
-                               title=args.title,
-                               products=products,
-                               qch_enabled=args.qhp
-                               )
+                                         args.doxdatadir,
+                                         title=args.title,
+                                         products=products,
+                                         qch_enabled=args.qhp
+                                         )
     generator.process_subgroup_html_files('index.html',
-                                args.doxdatadir,
-                                title=args.title,
-                                groups=groups,
-                                available_platforms=available_platforms,
-                                qch_enabled=args.qhp
-                                )
+                                          args.doxdatadir,
+                                          title=args.title,
+                                          groups=groups,
+                                          
available_platforms=available_platforms,
+                                          qch_enabled=args.qhp
+                                          )
     tmp_dir = tempfile.mkdtemp(prefix='kapidox-')
 
     try:
         if args.depdiagram_dot_dir:
             dot_files = utils.find_dot_files(args.depdiagram_dot_dir)
-            assert(dot_files)
+            assert dot_files
         for lib in libraries:
-            logging.info('# Generating doc for {}'.format(lib.fancyname))
+            logging.info(f'# Generating doc for {lib.fancyname}')
             if args.depdiagram_dot_dir:
                 png_path = os.path.join(tmp_dir, lib.name) + '.png'
                 ok = generator.generate_diagram(png_path, lib.fancyname,
-                                      dot_files, tmp_dir)
+                                                dot_files, tmp_dir)
                 if ok:
                     lib.dependency_diagram = png_path
 
@@ -98,8 +99,7 @@
 
         # Rebuild for interdependencies
         for lib in libraries:
-            logging.info('# Rebuilding {} for interdependencies'
-                         .format(lib.fancyname))
+            logging.info(f'# Rebuilding {lib.fancyname} for interdependencies')
             shutil.rmtree(lib.outputdir)
             ctx = generator.create_fw_context(args, lib, tagfiles, copyright)
             generator.gen_fw_apidocs(ctx, tmp_dir)
@@ -118,10 +118,9 @@
                 shutil.copy(product.logo_url_src, product.logo_url)
         generator.create_global_index(products)
         if args.qhp:
-            logging.info('# Merge qch files'
-                         .format(lib.fancyname))
+            logging.info('# Merge qch files')
             generator.create_qch(products, tagfiles)
-        logging.info("# Writing metadata... ")
+        logging.info("# Writing metadata...")
         with open('metadata.json', 'w') as file:
             json.dump(metalist, file)
         libs = []
@@ -132,6 +131,6 @@
         logging.info('# Done')
     finally:
         if args.keep_temp_dirs:
-            logging.info('Kept temp dir at {}'.format(tmp_dir))
+            logging.info(f'Kept temp dir at {tmp_dir}')
         else:
             shutil.rmtree(tmp_dir)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/kapidox_generate.py 
new/kapidox-5.94.0/kapidox/kapidox_generate.py
--- old/kapidox-5.93.0/kapidox/kapidox_generate.py      2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/kapidox_generate.py      2022-04-19 11:14:35.000000000 +0200
@@ -57,7 +57,6 @@
                       "out*/trunk/kde-common/accounts and specify it with the "
                       "--accountsfile argument.")
 
-
     return maintainers
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/models.py 
new/kapidox-5.94.0/kapidox/models.py
--- old/kapidox-5.93.0/kapidox/models.py        2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/models.py        2022-04-19 11:14:35.000000000 +0200
@@ -17,7 +17,7 @@
 class Library(object):
     """ Library
     """
-    
+
     def __init__(self, metainfo, products, platforms, all_maintainers):
         """
             Constructor of the Library object
@@ -57,19 +57,19 @@
             del metainfo['group']
             products[utils.serialize_name(metainfo['name'])] = 
Product(metainfo, all_maintainers)
             self.part_of_group = False
-            logging.warning("Group of {} not found: 
dropped.".format(metainfo['fancyname']))
+            logging.warning("Group of {fancyname} not found: 
dropped.".format_map(metainfo))
         self.product = products[utils.serialize_name(productname)]
         if self.product is None:
-            raise ValueError("'{}' does not belong to a product."
-                             .format(metainfo['name']))
+            raise ValueError("'{name}' does not belong to a product."
+                             .format_map(metainfo))
 
         if 'subgroup' in metainfo and self.part_of_group:
             for sp in self.product.subproducts:
                 if sp.name == utils.serialize_name(metainfo['subgroup']):
                     self.subproduct = sp
             if self.subproduct is None:
-                logging.warning("Subgroup {} of library {} not documented, 
subgroup will be None"
-                                .format(metainfo['subgroup'], 
metainfo['name']))
+                logging.warning("Subgroup {subgroup} of library {name} not 
documented, subgroup will be None"
+                                .format_map(metainfo))
 
         if self.subproduct is not None:
             self.parent = self.subproduct
@@ -115,7 +115,7 @@
     def _set_outputdir(self, grouped):
         outputdir = self.name
         if grouped:
-            outputdir = self.product.outputdir + '/' + outputdir
+            outputdir = os.path.join(self.product.outputdir, outputdir)
         return outputdir.lower()
 
 
@@ -181,14 +181,13 @@
             self.mailinglist = None
             self.part_of_group = False
         else:
-            raise ValueError("I do not recognize a product in {}."
-                             .format(metainfo['name']))
+            raise ValueError("I do not recognize a product in {name}."
+                             .format_map(metainfo))
 
     def _extract_subproducts(self, groupinfo):
         subproducts = []
         if 'subgroups' in groupinfo:
             for sg in groupinfo['subgroups']:
-                sg
                 if 'name' in sg:
                     subproducts.append(Subproduct(sg, self))
         return subproducts
@@ -196,7 +195,7 @@
     def _set_logo(self):
         if self.logo_url_src is not None:
             filename, ext = os.path.splitext(self.logo_url_src)
-            return self.outputdir + '/' + self.name + ext
+            return os.path.join(self.outputdir, self.name) + ext
         else:
             return None
 
@@ -212,16 +211,15 @@
         logo_url = os.path.join(path, 'logo.png')
         if os.path.isfile(logo_url):
             if defined_not_found:
-                logging.warning("Defined {} logo file doesn't exist, set back 
to found logo.png"
-                                .format(self.fancyname))
+                logging.warning(f'Defined {self.fancyname} logo file does not 
exist, set back to found logo.png')
             return logo_url
 
         if defined_not_found:
-            logging.warning("Defined {} logo file doesn't exist, set back to 
None"
-                            .format(self.fancyname))
+            logging.warning(f'Defined {self.fancyname} logo file does not 
exist, set back to None')
 
         return None
 
+
 class Subproduct(object):
     """ Subproduct
     """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/preprocessing.py 
new/kapidox-5.94.0/kapidox/preprocessing.py
--- old/kapidox-5.93.0/kapidox/preprocessing.py 2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/preprocessing.py 2022-04-19 11:14:35.000000000 +0200
@@ -24,7 +24,6 @@
     "create_metainfo",
     "parse_tree")
 
-
 PLATFORM_ALL = "All"
 PLATFORM_UNKNOWN = "UNKNOWN"
 
@@ -61,7 +60,7 @@
                 dct[platform] = note
 
 
-def create_metainfo(path) -> Optional[Dict[str,Any]]:
+def create_metainfo(path) -> Optional[Dict[str, Any]]:
     """Look for a `metadata.yaml` file and create a dictionary out it.
 
     Args:
@@ -71,7 +70,7 @@
     did not fulfill some conditions.
     """
 
-    metainfo: Optional[Dict[str,Any]]
+    metainfo: Optional[Dict[str, Any]]
 
     if not os.path.isdir(path):
         return None
@@ -88,18 +87,15 @@
         metainfo = yaml.safe_load(open(metainfo_file))
     except Exception as e:
         print(e)
-        logging.warning('Could not load metainfo.yaml for {}, skipping it'
-                        .format(path))
+        logging.warning(f'Could not load metainfo.yaml for {path}, skipping 
it')
         return None
 
     if metainfo is None:
-        logging.warning('Empty metainfo.yaml for {}, skipping it'
-                        .format(path))
+        logging.warning(f'Empty metainfo.yaml for {path}, skipping it')
         return None
 
     if 'subgroup' in metainfo and 'group' not in metainfo:
-        logging.warning('Subgroup but no group in {}, skipping it'
-                        .format(path))
+        logging.warning(f'Subgroup but no group in {path}, skipping it')
         return None
 
     # Suppose we get a relative path passed in (e.g. on the command-line,
@@ -113,8 +109,7 @@
         fancyname = utils.parse_fancyname(path)
 
     if not fancyname:
-        logging.warning('Could not find fancy name for {}, skipping it'
-                        .format(path))
+        logging.warning(f'Could not find fancy name for {path}, skipping it')
         return None
     # A fancyname has 1st char capitalized
     fancyname = fancyname[0].capitalize() + fancyname[1:]
@@ -137,29 +132,29 @@
         'dependency_diagram': None,
         'path': path,
         'qdoc': qdoc,
-        })
+    })
 
     # replace legacy platform names
     if 'platforms' in metainfo:
         platforms = metainfo['platforms']
         for index, x in enumerate(platforms):
             if x['name'] == "MacOSX":
-                x['name'] = "macOS";
+                x['name'] = "macOS"
                 platforms[index] = x
-                logging.warning('{} uses outdated platform name, please 
replace "MacOSX" with "macOS".'
-                        .format(metainfo['fancyname']))
-        metainfo.update({ 'platforms': platforms })
+                logging.warning('{fancyname} uses outdated platform name, 
please replace "MacOSX" with "macOS".'
+                                .format_map(metainfo))
+        metainfo.update({'platforms': platforms})
     if 'group_info' in metainfo:
         group_info = metainfo['group_info']
         if 'platforms' in group_info:
             platforms = group_info['platforms']
             for index, x in enumerate(platforms):
                 if "MacOSX" in x:
-                    x = x.replace("MacOSX", "macOS");
+                    x = x.replace("MacOSX", "macOS")
                     platforms[index] = x
-                    logging.warning('Group {} uses outdated platform name, 
please replace "MacOSX" with "macOS".'
-                            .format(group_info['fancyname']))
-            group_info.update({ 'platforms': platforms })
+                    logging.warning('Group {fancyname} uses outdated platform 
name, please replace "MacOSX" with "macOS".'
+                                    .format_map(group_info))
+            group_info.update({'platforms': platforms})
 
     return metainfo
 
@@ -192,8 +187,7 @@
             if metainfo['public_lib'] or 'group_info' in metainfo:
                 metalist.append(metainfo)
             else:
-                logging.warning("{} has no public libraries"
-                                .format(metainfo['name']))
+                logging.warning('{name} has no public 
libraries'.format_map(metainfo))
 
     return metalist
 
@@ -203,8 +197,7 @@
     list.
 
     Args:
-        metalist: (list of dict) lists of the metainfo extracted in
-    parse_tree().
+        metalist: (list of dict) lists of the metainfo extracted in 
parse_tree().
         all_maintainers: (dict of dict)  all possible maintainers.
 
     Returns:
@@ -214,7 +207,7 @@
     products = dict()
     groups = []
     libraries = []
-    available_platforms = set(['Windows', 'macOS', 'Linux', 'Android', 
'FreeBSD'])
+    available_platforms = {'Windows', 'macOS', 'Linux', 'Android', 'FreeBSD'}
 
     # First extract the structural info
     for metainfo in metalist:
@@ -232,8 +225,8 @@
 
             available_platforms.update(set(platform_lst))
         except (KeyError, TypeError):
-            logging.warning('{} library lacks valid platform definitions'
-                            .format(metainfo['fancyname']))
+            logging.warning('{fancyname} library lacks valid platform 
definitions'
+                            .format_map(metainfo))
             platforms = [dict(name=PLATFORM_UNKNOWN)]
 
         dct = dict((x['name'], x.get('note', '')) for x in platforms)
@@ -260,7 +253,7 @@
 
     Args:
         metainfo: (dict) metainfo created by the create_metainfo() function.
-        all_maintainer: (dict of dict) all possible maintainers
+        all_maintainers: (dict of dict) all possible maintainers
 
     Returns:
         A Product or None if the metainfo does not describe a product.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/kapidox/utils.py 
new/kapidox-5.94.0/kapidox/utils.py
--- old/kapidox-5.93.0/kapidox/utils.py 2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/kapidox/utils.py 2022-04-19 11:14:35.000000000 +0200
@@ -14,6 +14,7 @@
 import tempfile
 import requests
 
+
 ## @package kapidox.utils
 #
 # Multiple usage utils.
@@ -26,6 +27,7 @@
 # to run on builds.kde.org, which may not have all the required dependencies.
 #
 
+
 def setup_logging():
     FORMAT = '%(asctime)s %(levelname)s %(message)s'
     logging.basicConfig(format=FORMAT, datefmt='%H:%M:%S', level=logging.DEBUG)
@@ -46,6 +48,7 @@
     else:
         return None
 
+
 def set_repopath(id):
     """ Return the repopath for the repo id, queried from projects.kde.org
 
@@ -69,12 +72,12 @@
         # kitemviews is not recognized).
         return None
 
+
 def set_maintainers(maintainer_keys, all_maintainers):
     """ Expend the name of the maintainers.
 
     Args:
-        dictionary: (dict) Dictionary from which the name to expend will be 
read.
-        key: (string) Key of the dictionary where the name to expend is saved.
+        maintainer_keys: (string) Key of the dictionary where the name to 
expend is saved.
         all_maintainers: (dict of dict) Look-up table where the names and 
emails of
     the maintainers are stored.
 
@@ -87,14 +90,14 @@
                                'email: 'toto...@example.com'}
                       }
 
-        >>> set_maintainers(maintainer_keys, my_team)
+        >>> set_maintainers(maintainer_keys, all_maintainers)
     """
 
     if not maintainer_keys:
         maintainers = []
     elif isinstance(maintainer_keys, list):
         maintainers = map(lambda x: all_maintainers.get(x, None),
-                             maintainer_keys)
+                          maintainer_keys)
     else:
         maintainers = [all_maintainers.get(maintainer_keys, None)]
 
@@ -110,7 +113,7 @@
     cmakelists_path = os.path.join(fw_dir, "CMakeLists.txt")
     if not os.path.exists(cmakelists_path):
         for f in os.listdir(fw_dir):
-            if ".qbs" in f and not "Test" in f:
+            if ".qbs" in f and "Test" not in f:
                 return f[:-4]
         logging.error("No CMakeLists.txt in {}".format(fw_dir))
         return None
@@ -121,8 +124,7 @@
         if match:
             return match.group(1)
 
-    logging.error("Failed to find framework name: Could not find a "
-                  "'project()' command in {}.".format(cmakelists_path))
+    logging.error(f"Failed to find framework name: Could not find a 
'project()' command in {cmakelists_path}.")
     return None
 
 
@@ -141,8 +143,8 @@
             # NSUserDomainMask = 1
             # True for expanding the tilde into a fully qualified path
             cachedir = os.path.join(
-                    NSSearchPathForDirectoriesInDomains(14, 1, True)[0],
-                    'KApiDox')
+                NSSearchPathForDirectoriesInDomains(14, 1, True)[0],
+                'KApiDox')
         except:
             pass
     elif os.name == "posix":
@@ -230,6 +232,8 @@
 
 
 _KAPIDOX_VERSION = None
+
+
 def get_kapidox_version():
     """Get commit id of running code if it is running from git repository.
 
@@ -255,7 +259,7 @@
 
     git_HEAD = os.path.join(git_dir, "HEAD")
     if not os.path.isfile(git_HEAD):
-        logging.warning("Getting git info failed: {} is not a 
file".format(git_HEAD))
+        logging.warning(f'Getting git info failed: {git_HEAD} is not a file')
         return _KAPIDOX_VERSION
 
     try:
@@ -266,7 +270,7 @@
     except Exception as exc:
         # Catch all exceptions here: whatever fails in this function should not
         # cause the code to fail
-        logging.warning("Getting git info failed: {}".format(exc))
+        logging.warning(f'Getting git info failed: {exc}')
     return _KAPIDOX_VERSION
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/kapidox-5.93.0/setup.py new/kapidox-5.94.0/setup.py
--- old/kapidox-5.93.0/setup.py 2022-03-13 13:10:03.000000000 +0100
+++ new/kapidox-5.94.0/setup.py 2022-04-19 11:14:35.000000000 +0200
@@ -2,7 +2,7 @@
 
 setup(
         name='kapidox',
-        version='5.93.0',
+        version='5.94.0',
         description='KDE API documentation generation tools',
         maintainer='Olivier Churlaud',
         maintainer_email='oliv...@churlaud.com',
