jenkins-bot has submitted this change and it was merged. Change subject: (bug 64186) Decrease exhausting memory usage, speed up processing ......................................................................
(bug 64186) Decrease exhausting memory usage, speed up processing - delete local list after we are done with it - decrease processing time: list.remove() consumes a lot of time because lists are not indexed and removing an item needs to search inside the sequence. There is an easy way for more speed when the parent category is just skipped inside the loop. This decreases time consumption by 65% for depth:3 Change-Id: I067c2a0c95e86fa7367d6a3795962ec31fd5208c --- M scripts/category.py 1 file changed, 19 insertions(+), 22 deletions(-) Approvals: Ladsgroup: Looks good to me, approved jenkins-bot: Verified diff --git a/scripts/category.py b/scripts/category.py index e12fd6c..8922efe 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -876,41 +876,37 @@ """ - result = u'#' * currentDepth + ' ' + result = u'#' * currentDepth + if currentDepth > 0: + result += u' ' result += cat.title(asLink=True, textlink=True, withNamespace=False) result += ' (%d)' % len(self.catDB.getArticles(cat)) - # We will remove an element of supercats, but need the original set - # later, so we create a list from the catDB.getSupercats(cat) set - supercats = list(self.catDB.getSupercats(cat)) + if currentDepth < self.maxDepth / 2: + # noisy dots + pywikibot.output('.', newline=False) # Find out which other cats are supercats of the current cat - try: - supercats.remove(parent) - except: - pass - if supercats: - if currentDepth < self.maxDepth / 2: - # noisy dots - pywikibot.output('.', newline=False) - supercat_names = [] - for i, cat in enumerate(supercats): - # create a list of wiki links to the supercategories + supercat_names = [] + for cat in self.catDB.getSupercats(cat): + # create a list of wiki links to the supercategories + if cat != parent: supercat_names.append(cat.title(asLink=True, textlink=True, withNamespace=False)) - # print this list, separated with commas, using translations - # given in also_in_cats + if supercat_names: + # print this list, separated with commas, using 
translations + # given in also_in_cats result += ' ' + i18n.twtranslate(self.site, 'category-also-in', {'alsocat': ', '.join( supercat_names)}) + del supercat_names result += '\n' if currentDepth < self.maxDepth: for subcat in self.catDB.getSubcats(cat): # recurse into subdirectories result += self.treeview(subcat, currentDepth + 1, parent=cat) - else: - if self.catDB.getSubcats(cat): - # show that there are more categories beyond the depth limit - result += '#' * (currentDepth + 1) + ' [...]\n' + elif self.catDB.getSubcats(cat): + # show that there are more categories beyond the depth limit + result += '#' * (currentDepth + 1) + ' [...]\n' return result def run(self): @@ -1097,4 +1093,5 @@ except pywikibot.Error: pywikibot.error("Fatal error:", exc_info=True) finally: - catDB.dump() + if 'catDB' in globals(): + catDB.dump() -- To view, visit https://gerrit.wikimedia.org/r/128433 To unsubscribe, visit https://gerrit.wikimedia.org/r/settings Gerrit-MessageType: merged Gerrit-Change-Id: I067c2a0c95e86fa7367d6a3795962ec31fd5208c Gerrit-PatchSet: 4 Gerrit-Project: pywikibot/core Gerrit-Branch: master Gerrit-Owner: Xqt <i...@gno.de> Gerrit-Reviewer: Ladsgroup <ladsgr...@gmail.com> Gerrit-Reviewer: Merlijn van Deen <valhall...@arctus.nl> Gerrit-Reviewer: Pyfisch <pyfi...@gmail.com> Gerrit-Reviewer: Ricordisamoa <ricordisa...@live.it> Gerrit-Reviewer: Russell Blau <russb...@imapmail.org> Gerrit-Reviewer: jenkins-bot <> _______________________________________________ MediaWiki-commits mailing list MediaWiki-commits@lists.wikimedia.org https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits