commit:     9e24d0143450628f334cdb62e579efafd1bfd2ba
Author:     Kenneth Raplee <kenrap <AT> kennethraplee <DOT> com>
AuthorDate: Sat Apr  2 01:13:57 2022 +0000
Commit:     Sam James <sam <AT> gentoo <DOT> org>
CommitDate: Mon Apr  4 19:04:34 2022 +0000
URL:        https://gitweb.gentoo.org/proj/portage.git/commit/?id=9e24d014

Simplify with declarative programming

Signed-off-by: Kenneth Raplee <kenrap <AT> kennethraplee.com>
Signed-off-by: Sam James <sam <AT> gentoo.org>

 lib/portage/manifest.py | 144 ++++++++++++++++++++++++------------------------
 lib/portage/metadata.py |  15 ++---
 lib/portage/module.py   |  23 +++++---
 lib/portage/news.py     |  14 ++---
 lib/portage/output.py   |  13 ++---
 5 files changed, 105 insertions(+), 104 deletions(-)
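
The hunks below repeatedly apply one refactoring pattern: a loop that
accumulates results into a list, set, or dict is replaced by a comprehension
or generator expression. A minimal sketch of the pattern, using made-up data
rather than code from the patch:

    # Imperative: build the container step by step.
    evens = []
    for n in range(10):
        if n % 2 == 0:
            evens.append(n * n)

    # Declarative: state what the container holds.
    evens = [n * n for n in range(10) if n % 2 == 0]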

diff --git a/lib/portage/manifest.py b/lib/portage/manifest.py
index 5472e8fb1..ff166faa8 100644
--- a/lib/portage/manifest.py
+++ b/lib/portage/manifest.py
@@ -3,6 +3,7 @@
 
 import errno
 import io
+import itertools
 import logging
 import re
 import stat
@@ -107,9 +108,8 @@ class Manifest2Entry(ManifestEntry):
         myhashkeys = list(self.hashes)
         myhashkeys.remove("size")
         myhashkeys.sort()
-        for h in myhashkeys:
-            myline += " " + h + " " + str(self.hashes[h])
-        return myline
+        with_hashes = " ".join(f"{h} {self.hashes[h]}" for h in myhashkeys)
+        return f"{myline} {with_hashes}"
 
     def __eq__(self, other):
         if (
@@ -162,7 +162,6 @@ class Manifest:
             find_invalid_path_char = _find_invalid_path_char
         self._find_invalid_path_char = find_invalid_path_char
         self.pkgdir = _unicode_decode(pkgdir).rstrip(os.sep) + os.sep
-        self.fhashdict = {}
         self.hashes = set()
         self.required_hashes = set()
 
@@ -182,8 +181,8 @@ class Manifest:
         self.required_hashes.update(required_hashes)
         self.required_hashes.intersection_update(self.hashes)
 
-        for t in MANIFEST2_IDENTIFIERS:
-            self.fhashdict[t] = {}
+        self.fhashdict = {t: {} for t in MANIFEST2_IDENTIFIERS}
+
         if not from_scratch:
             self._read()
         if fetchlist_dict != None:
@@ -206,9 +205,9 @@ class Manifest:
 
     def getDigests(self):
         """Compability function for old digest/manifest code, returns dict of 
filename:{hashfunction:hashvalue}"""
-        rval = {}
-        for t in MANIFEST2_IDENTIFIERS:
-            rval.update(self.fhashdict[t])
+        rval = {
+            k: v for t in MANIFEST2_IDENTIFIERS for k, v in self.fhashdict[t].items()
+        }
         return rval
 
     def getTypeDigests(self, ftype):
@@ -269,18 +268,16 @@ class Manifest:
 
     def _getDigestData(self, distlist):
         """create a hash dict for a specific list of files"""
-        myhashdict = {}
-        for myname in distlist:
-            for mytype in self.fhashdict:
-                if myname in self.fhashdict[mytype]:
-                    myhashdict.setdefault(mytype, {})
-                    myhashdict[mytype].setdefault(myname, {})
-                    myhashdict[mytype][myname].update(self.fhashdict[mytype][myname])
+        myhashdict = {
+            mytype: {
+                myname: dict(self.fhashdict[mytype][myname])
+                for myname in distlist
+                if myname in self.fhashdict[mytype]
+            }
+            for mytype in self.fhashdict
+            if any(myname in self.fhashdict[mytype] for myname in distlist)
+        }
         return myhashdict
 
     def _createManifestEntries(self):
-        valid_hashes = set(get_valid_checksum_keys())
-        valid_hashes.add("size")
+        valid_hashes = set(itertools.chain(get_valid_checksum_keys(), ("size",)))
         mytypes = list(self.fhashdict)
         mytypes.sort()
         for t in mytypes:
@@ -296,13 +293,22 @@ class Manifest:
                 yield myentry
 
     def checkIntegrity(self):
-        for t in self.fhashdict:
-            for f in self.fhashdict[t]:
-                diff = self.required_hashes.difference(set(self.fhashdict[t][f]))
-                if diff:
-                    raise MissingParameter(
-                        _("Missing %s checksum(s): %s %s") % (" ".join(diff), t, f)
+        manifest_data = (
+            (
+                self.required_hashes.difference(set(self.fhashdict[mytype][myfile])),
+                mytype,
+                myfile,
+            )
+            for mytype in self.fhashdict
+            for myfile in self.fhashdict[mytype]
+        )
+        for needed_hashes, its_type, its_file in manifest_data:
+            if needed_hashes:
+                raise MissingParameter(
+                    _(
+                        f"Missing {' '.join(needed_hashes)} checksum(s): {its_type} {its_file}"
                     )
+                )
 
     def write(self, sign=False, force=False):
         """Write Manifest instance to disk, optionally signing it. Returns
@@ -488,10 +494,8 @@ class Manifest:
 
     def findFile(self, fname):
         """Return entrytype of the given file if present in Manifest or None 
if not present"""
-        for t in MANIFEST2_IDENTIFIERS:
-            if fname in self.fhashdict[t]:
-                return t
-        return None
+        found_entries = (t for t in MANIFEST2_IDENTIFIERS if fname in self.fhashdict[t])
+        return next(found_entries, None)
 
     def create(
         self,
@@ -529,18 +533,19 @@ class Manifest:
             find_invalid_path_char=self._find_invalid_path_char,
             strict_misc_digests=self.strict_misc_digests,
         )
-        pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
-        cat = self._pkgdir_category()
 
-        pkgdir = self.pkgdir
+        update_pkgdir = self._update_thick_pkgdir
         if self.thin:
-            cpvlist = self._update_thin_pkgdir(cat, pn, pkgdir)
-        else:
-            cpvlist = self._update_thick_pkgdir(cat, pn, pkgdir)
+            update_pkgdir = self._update_thin_pkgdir
 
-        distlist = set()
-        for cpv in cpvlist:
-            distlist.update(self._getCpvDistfiles(cpv))
+        cpvlist = update_pkgdir(
+            self._pkgdir_category(),
+            os.path.basename(self.pkgdir.rstrip(os.path.sep)),
+            self.pkgdir,
+        )
+        distlist = set(
+            distfile for cpv in cpvlist for distfile in self._getCpvDistfiles(cpv)
+        )
 
         if requiredDistfiles is None:
             # This allows us to force removal of stale digests for the
@@ -550,9 +555,7 @@ class Manifest:
             # repoman passes in an empty list, which implies that all distfiles
             # are required.
             requiredDistfiles = distlist.copy()
-        required_hash_types = set()
-        required_hash_types.add("size")
-        required_hash_types.update(self.required_hashes)
+        required_hash_types = set(itertools.chain(self.required_hashes, ("size",)))
         for f in distlist:
             fname = os.path.join(self.distdir, f)
             mystat = None
@@ -599,25 +602,28 @@ class Manifest:
         return cpv
 
     def _update_thin_pkgdir(self, cat, pn, pkgdir):
-        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
-            break
-        cpvlist = []
-        for f in pkgdir_files:
+        _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, None))
+
+        def _process_for_cpv(filename):
             try:
-                f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
+                filename = _unicode_decode(
+                    filename, encoding=_encodings["fs"], errors="strict"
+                )
             except UnicodeDecodeError:
-                continue
-            if f[:1] == ".":
-                continue
-            pf = self._is_cpv(cat, pn, f)
+                return None
+            if filename.startswith("."):
+                return None
+            pf = self._is_cpv(cat, pn, filename)
             if pf is not None:
-                cpvlist.append(pf)
+                return pf
+
+        processed = (_process_for_cpv(filename) for filename in pkgdir_files)
+        cpvlist = [pf for pf in processed if pf]
         return cpvlist
 
     def _update_thick_pkgdir(self, cat, pn, pkgdir):
+        _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, None))
         cpvlist = []
-        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
-            break
         for f in pkgdir_files:
             try:
                 f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
@@ -714,9 +720,7 @@ class Manifest:
         return self.fetchlist_dict[cpv]
 
     def getDistfilesSize(self, fetchlist):
-        total_bytes = 0
-        for f in fetchlist:
-            total_bytes += int(self.fhashdict["DIST"][f]["size"])
+        total_bytes = sum(int(self.fhashdict["DIST"][f]["size"]) for f in fetchlist)
         return total_bytes
 
     def updateFileHashes(
@@ -784,28 +788,26 @@ class Manifest:
 
     def getVersions(self):
         """Returns a list of manifest versions present in the manifest file."""
-        rVal = []
         mfname = self.getFullname()
         if not os.path.exists(mfname):
-            return rVal
-        myfile = io.open(
+            return []
+        with io.open(
             _unicode_encode(mfname, encoding=_encodings["fs"], errors="strict"),
             mode="r",
             encoding=_encodings["repo.content"],
             errors="replace",
-        )
-        lines = myfile.readlines()
-        myfile.close()
-        for l in lines:
-            mysplit = l.split()
-            if (
-                len(mysplit) > 4
-                and mysplit[0] in MANIFEST2_IDENTIFIERS
-                and ((len(mysplit) - 3) % 2) == 0
-                and not 2 in rVal
-            ):
-                rVal.append(2)
-        return rVal
+        ) as myfile:
+            line_splits = (line.split() for line in myfile.readlines())
+            validation = (
+                True
+                for line_split in line_splits
+                if len(line_split) > 4
+                and line_split[0] in MANIFEST2_IDENTIFIERS
+                and (len(line_split) - 3) % 2 == 0
+            )
+            if any(validation):
+                return [2]
+        return []
 
     def _catsplit(self, pkg_key):
         """Split a category and package, returning a list of [cat, pkg].

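A note on the itertools.chain() calls in manifest.py above: in Python,
("size") is just the string "size" (parentheses alone do not make a tuple),
so chaining it would add the individual characters to the set; the trailing
comma in ("size",) is what produces a one-element tuple. An illustrative
snippet, not taken from the patch:

    import itertools

    keys = ["BLAKE2B", "SHA512"]
    set(itertools.chain(keys, ("size")))   # {'BLAKE2B', 'SHA512', 's', 'i', 'z', 'e'}
    set(itertools.chain(keys, ("size",)))  # {'BLAKE2B', 'SHA512', 'size'}
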
diff --git a/lib/portage/metadata.py b/lib/portage/metadata.py
index 357917051..869c10bb3 100644
--- a/lib/portage/metadata.py
+++ b/lib/portage/metadata.py
@@ -75,8 +75,6 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
             eclass_db.update_eclasses()
             porttrees_data.append(TreeData(portdb.auxdb[path], eclass_db, path, src_db))
 
-    porttrees = [tree_data.path for tree_data in porttrees_data]
-
     quiet = (
         settings.get("TERM") == "dumb" or "--quiet" in myopts or not sys.stdout.isatty()
     )
@@ -100,7 +98,7 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
     # Temporarily override portdb.porttrees so portdb.cp_all()
     # will only return the relevant subset.
     portdb_porttrees = portdb.porttrees
-    portdb.porttrees = porttrees
+    portdb.porttrees = [tree_data.path for tree_data in porttrees_data]
     try:
         cp_all = portdb.cp_all()
     finally:
@@ -119,7 +117,6 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
 
     for cp in cp_all:
         for tree_data in porttrees_data:
-
             src_chf = tree_data.src_db.validation_chf
             dest_chf = tree_data.dest_db.validation_chf
             dest_chf_key = f"_{dest_chf}_"
@@ -190,11 +187,11 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
                         # We don't want to skip the write unless we're really
                         # sure that the existing cache is identical, so don't
                         # trust _mtime_ and _eclasses_ alone.
-                        for k in auxdbkeys:
-                            if dest.get(k, "") != src.get(k, ""):
-                                dest = None
-                                break
-
+                        cache_differs = any(
+                            dest.get(k, "") != src.get(k, "") for k in auxdbkeys
+                        )
+                        if cache_differs:
+                            dest = None
                 if dest is not None:
                     # The existing data is valid and identical,
                     # so there's no need to overwrite it.

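The auxdb comparison above leans on any() short-circuiting over a generator,
which plays the role of the old break-based loop. A standalone sketch of the
idiom with hypothetical cache entries, not portage data:

    keys = ("DEPEND", "RDEPEND", "SLOT")
    src = {"DEPEND": "dev-libs/foo", "SLOT": "0"}
    dest = {"DEPEND": "dev-libs/foo", "SLOT": "1"}

    # Stops at the first key whose cached values differ, like the old early break.
    cache_differs = any(dest.get(k, "") != src.get(k, "") for k in keys)
    if cache_differs:
        dest = None  # force the cache entry to be rewritten
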
diff --git a/lib/portage/module.py b/lib/portage/module.py
index 61c85aa47..8e63cd545 100644
--- a/lib/portage/module.py
+++ b/lib/portage/module.py
@@ -110,19 +110,27 @@ class Modules:
         @rtype: dictionary of module_plugins
         """
         module_dir = self._module_path
-        importables = []
         names = os.listdir(module_dir)
-        for entry in names:
-            # skip any __init__ or __pycache__ files or directories
-            if entry.startswith("__"):
-                continue
+
+        def _a_real_module(entry):
             try:
                 # test for statinfo to ensure it should a real module
                 # it will bail if it errors
                 os.lstat(os.path.join(module_dir, entry, "__init__.py"))
-                importables.append(entry)
             except EnvironmentError:
-                pass
+                return False
+            return True
+
+        # The importables list cannot be a generator.
+        # If it were a generator, it would be consumed by self.parents.extend()
+        # and the following for loop would have nothing left to iterate over.
+        importables = [
+            entry
+            for entry in names
+            if not entry.startswith("__") and _a_real_module(entry)
+        ]
+        self.parents.extend(importables)
+
         kids = {}
         for entry in importables:
             new_module = Module(entry, self._namepath)
@@ -131,7 +139,6 @@ class Modules:
                 kid = new_module.kids[module_name]
                 kid["parent"] = new_module
                 kids[kid["name"]] = kid
-            self.parents.append(entry)
         return kids
 
     def get_module_names(self):

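The comment in the module.py hunk about importables not being a generator can
be demonstrated in isolation: a generator is exhausted by its first full
traversal, while a list can be iterated again. Illustrative snippet, not from
the patch:

    names = ["alpha", "__pycache__", "beta"]

    gen = (n for n in names if not n.startswith("__"))
    parents = []
    parents.extend(gen)       # consumes the generator
    print(list(gen))          # [] -- nothing left for a second loop

    importables = [n for n in names if not n.startswith("__")]
    parents.extend(importables)
    print(list(importables))  # ['alpha', 'beta'] -- still iterable
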
diff --git a/lib/portage/news.py b/lib/portage/news.py
index 9ef6efde0..9f373d3d7 100644
--- a/lib/portage/news.py
+++ b/lib/portage/news.py
@@ -280,14 +280,12 @@ class NewsItem:
 
         kwargs = {"vardb": vardb, "config": config, "profile": profile}
 
-        all_match = True
-        for values in self.restrictions.values():
-            any_match = False
-            for restriction in values:
-                if restriction.checkRestriction(**kwargs):
-                    any_match = True
-            if not any_match:
-                all_match = False
+        all_match = all(
+            any(restriction.checkRestriction(**kwargs) for restriction in values)
+            for values in self.restrictions.values()
+        )
 
         return all_match
 

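The NewsItem restriction check above has to nest any() inside all(): every
restriction group must contain at least one matching restriction. A flat
all() over a filtered generator can never fail, because the filter drops the
failures and all() of an empty iterable is True. A minimal sketch with
stand-in predicates rather than portage's restriction objects:

    groups = [
        [lambda: False, lambda: True],   # this group has a match
        [lambda: False, lambda: False],  # this group does not
    ]

    # Wrong: the filter removes every failure, so all() cannot return False.
    flat = all(True for group in groups for check in group if check())

    # Right: each group must contain at least one passing check.
    nested = all(any(check() for check in group) for group in groups)

    assert flat is True and nested is False
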
diff --git a/lib/portage/output.py b/lib/portage/output.py
index 33c477012..e20046fc5 100644
--- a/lib/portage/output.py
+++ b/lib/portage/output.py
@@ -5,6 +5,7 @@ __docformat__ = "epytext"
 
 import errno
 import io
+import itertools
 import re
 import subprocess
 import sys
@@ -74,16 +75,12 @@ codes["bg_darkyellow"] = codes["bg_brown"]
 
 
 def color(fg, bg="default", attr=["normal"]):
-    mystr = codes[fg]
-    for x in [bg] + attr:
-        mystr += codes[x]
-    return mystr
+    myansicodechain = itertools.chain((codes[fg],), (codes[x] for x in [bg, *attr]))
+    return "".join(myansicodechain)
 
 
-ansi_codes = []
-for x in range(30, 38):
-    ansi_codes.append("%im" % x)
-    ansi_codes.append("%i;01m" % x)
+ansi_codes = [y for x in range(30, 38) for y in (f"{x}m", f"{x};01m")]
+
 
 rgb_ansi_colors = [
     "0x000000",
