http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11169

Revision: 11169
Author:   xqt
Date:     2013-03-03 10:49:02 +0000 (Sun, 03 Mar 2013)
Log Message:
-----------
some more PEP8 changes

Modified Paths:
--------------
    trunk/pywikipedia/interwiki.py

Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py      2013-03-03 08:46:05 UTC (rev 11168)
+++ trunk/pywikipedia/interwiki.py      2013-03-03 10:49:02 UTC (rev 11169)
@@ -835,13 +835,15 @@
 
         # Remember the "origin page"
         self.originPage = originPage
+        self.repoPage = None
         # todo is a list of all pages that still need to be analyzed.
         # Mark the origin page as todo.
         self.todo = PageTree()
         if originPage:
             self.todo.add(originPage)
         if globalvar.repository:
-            self.todo.add(pywikibot.DataPage(originPage))
+            self.repoPage = pywikibot.DataPage(originPage)
+            self.todo.add(self.repoPage)
 
         # done is a list of all pages that have been analyzed and that
         # are known to belong to this subject.
@@ -1593,15 +1595,15 @@
                     else:
                         #TODO: allow answer to repeat previous or go back 
after a mistake
                         answer = pywikibot.inputChoice(u'What should be 
done?', ['accept', 'reject', 'give up', 'accept all'], ['a', 'r', 'g', 'l'], 
'a')
-                    if answer == 'l': # accept all
+                    if answer == 'l':  # accept all
                         acceptall = True
                         answer = 'a'
-                    if answer == 'a': # accept this one
+                    if answer == 'a':  # accept this one
                         result[site] = pages[0]
                         break
-                    elif answer == 'g': # give up
+                    elif answer == 'g':  # give up
                         return None
-                    elif answer == 'r': # reject
+                    elif answer == 'r':  # reject
                         # None acceptable
                         break
         return result
@@ -1630,7 +1632,7 @@
             t2 = (((int(time2[0:4]) * 12 + int(time2[4:6])) * 30 +
                    int(time2[6:8])) * 24 + int(time2[8:10])) * 60 + \
                    int(time2[10:12])
-            return abs(t2-t1)
+            return abs(t2 - t1)
 
         if not self.isDone():
             raise "Bugcheck: finish called before done"
@@ -1658,7 +1660,7 @@
         pywikibot.output(u"======Post-processing %s======" % self.originPage)
         # Assemble list of accepted interwiki links
         new = self.assemble()
-        if new is None: # User said give up
+        if new is None:  # User said give up
             pywikibot.output(u"======Aborted processing %s======"
                              % self.originPage)
             return
@@ -1683,7 +1685,7 @@
             frgnSiteDone = False
 
             for siteCode in lclSite.family.languages_by_size:
-                site = pywikibot.getSite(code = siteCode)
+                site = pywikibot.getSite(siteCode)
                 if (not lclSiteDone and site == lclSite) or \
                    (not frgnSiteDone and site != lclSite and site in new):
                     if site == lclSite:
@@ -1693,14 +1695,14 @@
                             if self.replaceLinks(new[site], new):
                                 updatedSites.append(site)
                             if site != lclSite:
-                                 frgnSiteDone = True
+                                frgnSiteDone = True
                         except SaveError:
                             notUpdatedSites.append(site)
                         except GiveUpOnPage:
                             break
                 elif not globalvar.strictlimittwo and site in new \
                      and site != lclSite:
-                    old={}
+                    old = {}
                     try:
                         for page in new[site].interwiki():
                             old[page.site] = page
@@ -1709,12 +1711,12 @@
                                          % new[site])
                         continue
                     mods, mcomment, adding, removing, modifying \
-                          = compareLanguages(old, new, insite = lclSite)
+                          = compareLanguages(old, new, insite=lclSite)
                     if (len(removing) > 0 and not globalvar.autonomous) or \
                        (len(modifying) > 0 and self.problemfound) or \
                        len(old) == 0 or \
-                       (globalvar.needlimit and \
-                        len(adding) + len(modifying) >= globalvar.needlimit 
+1):
+                       (globalvar.needlimit and
+                        len(adding) + len(modifying) >= globalvar.needlimit + 
1):
                         try:
                             if self.replaceLinks(new[site], new):
                                 updatedSites.append(site)
@@ -1741,7 +1743,7 @@
                    (page.site.sitename() == 'wikipedia:is' or
                     page.site.sitename() == 'wikipedia:zh' and
                     page.namespace() == 10):
-                    old={}
+                    old = {}
                     try:
                         for mypage in new[page.site].interwiki():
                             old[mypage.site] = mypage
@@ -1767,7 +1769,7 @@
                             diff = minutesDiff(page.editTime(),
                                                time.strftime("%Y%m%d%H%M%S",
                                                              time.gmtime()))
-                            if diff > 30*24*60:
+                            if diff > 30 * 24 * 60:
                                 smallWikiAllowed = True
                             else:
                                 pywikibot.output(
@@ -1896,7 +1898,7 @@
                 del new[stmp]
 
         # Put interwiki links into a map
-        old={}
+        old = {}
         for page2 in interwikis:
             old[page2.site] = page2
 
@@ -1921,7 +1923,7 @@
                    globalvar.cleanup and \
                    unicode(rmPage) not in globalvar.remove or \
                    rmPage.site.lang in ['hak', 'hi', 'cdo', 'sa'] and \
-                   pywikibot.unicode_error: #work-arround for bug #3081100 (do 
not remove affected pages)
+                   pywikibot.unicode_error:  # work-around for bug #3081100 
(do not remove affected pages)
                     new[rmsite] = rmPage
                     pywikibot.output(
                         u"WARNING: %s is either deleted or has a mismatching 
disambiguation state."
@@ -2122,6 +2124,7 @@
         except (socket.error, IOError):
             pywikibot.output(u'ERROR: could not report backlinks')
 
+
 class InterwikiBot(object):
     """A class keeping track of a list of subjects, controlling which pages
        are queried from which languages when."""
@@ -2138,32 +2141,34 @@
         self.pageGenerator = None
         self.generated = 0
 
-    def add(self, page, hints = None):
+    def add(self, page, hints=None):
         """Add a single subject to the list"""
-        subj = Subject(page, hints = hints)
+        subj = Subject(page, hints=hints)
         self.subjects.append(subj)
         for site, count in subj.openSites():
             # Keep correct counters
             self.plus(site, count)
 
-    def setPageGenerator(self, pageGenerator, number = None, until = None):
+    def setPageGenerator(self, pageGenerator, number=None, until=None):
         """Add a generator of subjects. Once the list of subjects gets
            too small, this generator is called to produce more Pages"""
         self.pageGenerator = pageGenerator
         self.generateNumber = number
         self.generateUntil = until
 
-    def dump(self, append = True):
+    def dump(self, append=True):
         site = pywikibot.getSite()
-        dumpfn = pywikibot.config.datafilepath(
-                     'interwiki-dumps',
-                     'interwikidump-%s-%s.txt' % (site.family.name, site.lang))
-        if append: mode = 'appended'
-        else: mode = 'written'
+        dumpfn = pywikibot.config.datafilepath('interwiki-dumps',
+                                               'interwikidump-%s-%s.txt'
+                                               % (site.family.name, site.lang))
+        if append:
+            mode = 'appended'
+        else:
+            mode = 'written'
         f = codecs.open(dumpfn, mode[0], 'utf-8')
         for subj in self.subjects:
             if subj.originPage:
-                f.write(subj.originPage.title(asLink=True)+'\n')
+                f.write(subj.originPage.title(asLink=True) + '\n')
         f.close()
         pywikibot.output(u'Dump %s (%s) %s.' % (site.lang, site.family.name, 
mode))
         return dumpfn
@@ -2220,10 +2225,10 @@
                 if self.generateUntil:
                     until = self.generateUntil
                     if page.site.lang not in page.site.family.nocapitalize:
-                        until = until[0].upper()+until[1:]
+                        until = until[0].upper() + until[1:]
                     if page.title(withNamespace=False) > until:
                         raise StopIteration
-                self.add(page, hints = globalvar.hints)
+                self.add(page, hints=globalvar.hints)
                 self.generated += 1
                 if self.generateNumber:
                     if self.generated >= self.generateNumber:
@@ -2272,12 +2277,14 @@
             # Can we make more home-language queries by adding subjects?
             if self.pageGenerator and mycount < globalvar.maxquerysize:
                 timeout = 60
-                while timeout<3600:
+                while timeout < 3600:
                     try:
                         self.generateMore(globalvar.maxquerysize - mycount)
                     except pywikibot.ServerError:
                         # Could not extract allpages special page?
-                        pywikibot.output(u'ERROR: could not retrieve more 
pages. Will try again in %d seconds'%timeout)
+                        pywikibot.output(
+                            u'ERROR: could not retrieve more pages. Will try 
again in %d seconds'
+                            % timeout)
                         time.sleep(timeout)
                         timeout *= 2
                     else:
@@ -2335,7 +2342,7 @@
     def queryStep(self):
         self.oneQuery()
         # Delete the ones that are done now.
-        for i in xrange(len(self.subjects)-1, -1, -1):
+        for i in xrange(len(self.subjects) - 1, -1, -1):
             subj = self.subjects[i]
             if subj.isDone():
                 subj.finish()
@@ -2365,6 +2372,7 @@
     def __len__(self):
         return len(self.subjects)
 
+
 def compareLanguages(old, new, insite):
 
     oldiw = set(old)
@@ -2416,7 +2424,8 @@
 
     return mods, mcomment, adding, removing, modifying
 
-def botMayEdit (page):
+
+def botMayEdit(page):
     tmpl = []
     try:
         tmpl, loc = moved_links[page.site.lang]
@@ -2430,23 +2439,28 @@
         pass
     tmpl += ignoreTemplates['_default']
     if tmpl != []:
-        templates = page.templatesWithParams(get_redirect=True);
+        templates = page.templatesWithParams(get_redirect=True)
         for template in templates:
             if template[0].lower() in tmpl:
                 return False
     return True
 
+
 def readWarnfile(filename, bot):
     import warnfile
     reader = warnfile.WarnfileReader(filename)
     # we won't use removeHints
     (hints, removeHints) = reader.getHints()
     for page, pagelist in hints.iteritems():
-        # The WarnfileReader gives us a list of pagelinks, but 
titletranslate.py expects a list of strings, so we convert it back.
-        # TODO: This is a quite ugly hack, in the future we should maybe make 
titletranslate expect a list of pagelinks.
-        hintStrings = ['%s:%s' % (hintedPage.site.language(), 
hintedPage.title()) for hintedPage in pagelist]
-        bot.add(page, hints = hintStrings)
+        # The WarnfileReader gives us a list of pagelinks, but 
titletranslate.py
+        # expects a list of strings, so we convert it back.
+        # TODO: This is a quite ugly hack, in the future we should maybe make
+        # titletranslate expect a list of pagelinks.
+        hintStrings = ['%s:%s' % (hintedPage.site.language(),
+                                  hintedPage.title()) for hintedPage in 
pagelist]
+        bot.add(page, hints=hintStrings)
 
+
 def main():
     singlePageTitle = []
     opthintsonly = False
@@ -2462,7 +2476,7 @@
     optContinue = False
     optRestore = False
     restoredFiles = []
-    File2Restore  = []
+    File2Restore = []
     dumpFileName = ''
     append = True
     newPages = None
@@ -2532,7 +2546,7 @@
         site = pywikibot.getSite()
         try:
             mainpagename = site.siteinfo()['mainpage']
-        except TypeError: #pywikibot module handle
+        except TypeError:  # pywikibot module handle
             mainpagename = site.siteinfo['mainpage']
         globalvar.skip.add(pywikibot.Page(site, mainpagename))
     except pywikibot.Error:
@@ -2552,7 +2566,8 @@
             namespaces = []
         else:
             ns = 'all'
-        hintlessPageGen = pagegenerators.NewpagesPageGenerator(newPages, 
namespace=ns)
+        hintlessPageGen = pagegenerators.NewpagesPageGenerator(newPages,
+                                                               namespace=ns)
 
     elif optRestore or optContinue or globalvar.restoreAll:
         site = pywikibot.getSite()
@@ -2561,7 +2576,7 @@
             for FileName in glob.iglob('interwiki-dumps/interwikidump-*.txt'):
                 s = FileName.split('\\')[1].split('.')[0].split('-')
                 sitename = s[1]
-                for i in xrange(0,2):
+                for i in xrange(0, 2):
                     s.remove(s[0])
                 sitelang = '-'.join(s)
                 if site.family.name == sitename:
@@ -2569,16 +2584,19 @@
         else:
             File2Restore.append([site.family.name, site.lang])
         for sitename, sitelang in File2Restore:
-            dumpfn = pywikibot.config.datafilepath(
-                               'interwiki-dumps',
-                               u'interwikidump-%s-%s.txt'
-                                 % (sitename, sitelang))
-            pywikibot.output(u'Reading interwikidump-%s-%s.txt' % (sitename, 
sitelang))
+            dumpfn = pywikibot.config.datafilepath('interwiki-dumps',
+                                                   u'interwikidump-%s-%s.txt'
+                                                   % (sitename, sitelang))
+            pywikibot.output(u'Reading interwikidump-%s-%s.txt'
+                             % (sitename, sitelang))
             site = pywikibot.getSite(sitelang, sitename)
             if not hintlessPageGen:
-                hintlessPageGen = pagegenerators.TextfilePageGenerator(dumpfn, 
site)
+                hintlessPageGen = pagegenerators.TextfilePageGenerator(dumpfn,
+                                                                       site)
             else:
-                hintlessPageGen = 
pagegenerators.CombinedPageGenerator([hintlessPageGen,pagegenerators.TextfilePageGenerator(dumpfn,
 site)])
+                hintlessPageGen = pagegenerators.CombinedPageGenerator(
+                    [hintlessPageGen,
+                     pagegenerators.TextfilePageGenerator(dumpfn, site)])
             restoredFiles.append(dumpfn)
         if hintlessPageGen:
             hintlessPageGen = 
pagegenerators.DuplicateFilterPageGenerator(hintlessPageGen)
@@ -2594,10 +2612,14 @@
                     nextPage = lastPage
                     namespace = page.namespace()
             if nextPage == "!":
-                pywikibot.output(u"Dump file is empty?! Starting at the 
beginning.")
+                pywikibot.output(
+                    u"Dump file is empty?! Starting at the beginning.")
             else:
                 nextPage += '!'
-            hintlessPageGen = 
pagegenerators.CombinedPageGenerator([hintlessPageGen, 
pagegenerators.AllpagesPageGenerator(nextPage, namespace, includeredirects = 
False)])
+            hintlessPageGen = pagegenerators.CombinedPageGenerator(
+                [hintlessPageGen,
+                 pagegenerators.AllpagesPageGenerator(nextPage, namespace,
+                                                      includeredirects=False)])
         if not hintlessPageGen:
             pywikibot.output(u'No Dumpfiles found.')
             return
@@ -2610,7 +2632,7 @@
         if len(namespaces) > 0:
             hintlessPageGen = 
pagegenerators.NamespaceFilterPageGenerator(hintlessPageGen, namespaces)
         # we'll use iter() to create make a next() function available.
-        bot.setPageGenerator(iter(hintlessPageGen), number = number, 
until=until)
+        bot.setPageGenerator(iter(hintlessPageGen), number=number, until=until)
     elif warnfile:
         # TODO: filter namespaces if -namespace parameter was used
         readWarnfile(warnfile, bot)
@@ -2622,7 +2644,7 @@
             singlePage = pywikibot.Page(pywikibot.getSite(), singlePageTitle)
         else:
             singlePage = None
-        bot.add(singlePage, hints = globalvar.hints)
+        bot.add(singlePage, hints=globalvar.hints)
 
     try:
         try:
@@ -2649,7 +2671,7 @@
                 pass
 
 #===========
-globalvar=Global()
+globalvar = Global()
 
 if __name__ == "__main__":
     try:


_______________________________________________
Pywikipedia-svn mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-svn

Reply via email to