When we touch repoXML and download a new repomd.xml,
don't immediately download all of the metadata.

YumPackageSack.populate() already implements that,
avoid duplicating the code.
---
 yum/yumRepo.py |  121 +-------------------------------------------------------
 1 files changed, 2 insertions(+), 119 deletions(-)

diff --git a/yum/yumRepo.py b/yum/yumRepo.py
index bd43d66..18fc37c 100644
--- a/yum/yumRepo.py
+++ b/yum/yumRepo.py
@@ -1318,129 +1318,12 @@ Insufficient space in download directory %s
 
         return local
 
-    def _commonRetrieveDataMD(self, mdtypes=None):
-        """ Retrieve any listed mdtypes, and revert if there was a failure.
-            Also put any of the non-valid mdtype files from the old_repo_XML
-            into the delete list, this means metadata can change filename
-            without us leaking it. """
-
-        def _mdtype_eq(omdtype, odata, nmdtype, ndata):
-            """ Check if two returns from _get_mdtype_data() are equal. """
-            if ndata is None:
-                return False
-            if omdtype != nmdtype:
-                return False
-            if odata.checksum != ndata.checksum:
-                return False
-            #  If we turn --unique-md-filenames on without chaning the data,
-            # then we'll get different filenames, but the same checksum.
-            #  Atm. just say they are different, to make sure we delete the
-            # old files.
-            orname = os.path.basename(odata.location[1])
-            nrname = os.path.basename(ndata.location[1])
-            if orname != nrname:
-                return False
-            return True
-
-        all_mdtypes = self.retrieved.keys()
-        if mdtypes is None:
-            mdtypes = all_mdtypes
-
-        reverts = []
-        if 'old_repo_XML' not in self._oldRepoMDData:
-            old_repo_XML = None
-        else:
-            old_repo_XML = self._oldRepoMDData['old_repo_XML']
-            self._oldRepoMDData['old_MD_files'] = reverts
-
-        # Inited twice atm. ... sue me
-        self._oldRepoMDData['new_MD_files'] = []
-        downloading_with_size = []
-        downloading_no_size   = []
-        for mdtype in all_mdtypes:
-            (nmdtype, ndata) = self._get_mdtype_data(mdtype)
-
-            if old_repo_XML:
-                (omdtype, odata) = self._get_mdtype_data(mdtype,
-                                                         repoXML=old_repo_XML)
-                local = self._groupCheckDataMDValid(odata, omdtype,mdtype,True)
-                if local:
-                    if _mdtype_eq(omdtype, odata, nmdtype, ndata):
-                        continue # If they are the same do nothing
-
-                    # Move this version, we _may_ get a new one.
-                    # We delete it on success, revert it back on failure.
-                    # We don't copy as we know it's bad due to above test.
-                    os.rename(local, local + '.old.tmp')
-                    reverts.append(local)
-
-                    #  This is the super easy way. We just to see if a generated
-                    # file is there for all files, but it should always work.
-                    #  And anyone who is giving us MD with blah and blah.sqlite
-                    # which are different types, can play a game I like to call
-                    # "come here, ouch".
-                    gen_local = local + '.sqlite'
-                    if os.path.exists(gen_local):
-                        os.rename(gen_local, gen_local + '.old.tmp')
-                        reverts.append(gen_local)
-
-            if ndata is None: # Doesn't exist in this repo
-                continue
-
-            if mdtype not in mdtypes:
-                continue
-
-            # No old repomd data, but we might still have uncompressed MD
-            if self._groupCheckDataMDValid(ndata, nmdtype, mdtype):
-                continue
-
-            if ndata.size is None:
-                downloading_no_size.append((ndata, nmdtype))
-            else:
-                downloading_with_size.append((ndata, nmdtype))
-
-        if len(downloading_with_size) == 1:
-            downloading_no_size.extend(downloading_with_size)
-            downloading_with_size = []
-
-        remote_size = 0
-        local_size  = 0
-        for (ndata, nmdtype) in downloading_with_size: # Get total size...
-            remote_size += int(ndata.size)
-
-        for (ndata, nmdtype) in downloading_with_size:
-            urlgrabber.progress.text_meter_total_size(remote_size, local_size)
-            if not self._retrieveMD(nmdtype, retrieve_can_fail=True):
-                self._revertOldRepoXML()
-                return False
-            local_size += int(ndata.size)
-        urlgrabber.progress.text_meter_total_size(0)
-        for (ndata, nmdtype) in downloading_no_size:
-            if not self._retrieveMD(nmdtype, retrieve_can_fail=True):
-                self._revertOldRepoXML()
-                return False
-
-        for (ndata, nmdtype) in downloading_with_size + downloading_no_size:
-            local = self._get_mdtype_fname(ndata, False)
-            if nmdtype.endswith("_db"): # Uncompress any compressed files
-                dl_local = local
-                local = misc.decompress(dl_local)
-                misc.unlink_f(dl_local)
-            self._oldRepoMDData['new_MD_files'].append(local)
-
-        self._doneOldRepoXML()
-        return True
-
     def _groupLoadRepoXML(self, text=None, mdtypes=None):
         """ Retrieve the new repomd.xml from the repository, then check it
             and parse it. If it fails we revert to the old version and pretend
-            that is fine. If the new repomd.xml requires new version of files
-            that we have, like updateinfo.xml, we download those too and if any
-            of those fail, we again revert everything and pretend old data is
-            good. """
+            that is fine. """
 
-        if self._commonLoadRepoXML(text):
-            self._commonRetrieveDataMD(mdtypes)
+        self._commonLoadRepoXML(text)
 
     def _mdpolicy2mdtypes(self):
         md_groups = {'instant'       : [],
-- 
1.7.4.4

_______________________________________________
Yum-devel mailing list
Yum-devel@lists.baseurl.org
http://lists.baseurl.org/mailman/listinfo/yum-devel

Reply via email to