http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11162
Revision: 11162
Author: xqt
Date: 2013-03-02 14:47:05 +0000 (Sat, 02 Mar 2013)
Log Message:
-----------
some PEP8 changes, update from trunk r11161
Modified Paths:
--------------
branches/rewrite/scripts/interwiki.py
Modified: branches/rewrite/scripts/interwiki.py
===================================================================
--- branches/rewrite/scripts/interwiki.py 2013-03-02 14:46:09 UTC (rev
11161)
+++ branches/rewrite/scripts/interwiki.py 2013-03-02 14:47:05 UTC (rev
11162)
@@ -333,7 +333,7 @@
# (C) Rob W.W. Hooft, 2003
# (C) Daniel Herding, 2004
# (C) Yuri Astrakhan, 2005-2006
-# (C) xqt, 2009-2012
+# (C) xqt, 2009-2013
# (C) Pywikipedia bot team, 2007-2013
#
# Distributed under the terms of the MIT license.
@@ -341,7 +341,10 @@
__version__ = '$Id$'
#
-import sys, copy, re, os
+import sys
+import copy
+import re
+import os
import time
import codecs
import pickle
@@ -359,101 +362,96 @@
'&pagegenerators_help;': pagegenerators.parameterHelp
}
+
class SaveError(pywikibot.Error):
"""
An attempt to save a page with changed interwiki has failed.
"""
+
class LinkMustBeRemoved(SaveError):
"""
An interwiki link has to be removed, but this can't be done because of user
preferences or because the user chose not to change the page.
"""
+
class GiveUpOnPage(pywikibot.Error):
"""
The user chose not to work on this page and its linked pages any more.
"""
+
# Subpage templates. Must be in lower case,
# whereas subpage itself must be case sensitive
moved_links = {
- 'ar' : ([u'documentation',
- u'template documentation',
- u'شرح',
- u'توثيق'], u'/doc'),
- 'bn' : (u'documentation', u'/doc'),
- 'ca' : (u'ús de la plantilla', u'/ús'),
- 'cs' : (u'dokumentace', u'/doc'),
- 'da' : (u'dokumentation', u'/doc'),
- 'de' : (u'dokumentation', u'/Meta'),
+ 'ar': ([u'documentation', u'template documentation', u'شرح', u'توثيق'],
+ u'/doc'),
+ 'bn': (u'documentation', u'/doc'),
+ 'ca': (u'ús de la plantilla', u'/ús'),
+ 'cs': (u'dokumentace', u'/doc'),
+ 'da': (u'dokumentation', u'/doc'),
+ 'de': (u'dokumentation', u'/Meta'),
'dsb': ([u'dokumentacija', u'doc'], u'/Dokumentacija'),
- 'en' : ([u'documentation',
- u'template documentation',
- u'template doc',
- u'doc',
- u'documentation, template'], u'/doc'),
- 'es' : ([u'documentación', u'documentación de plantilla'], u'/doc'),
- 'eu' : (u'txantiloi dokumentazioa', u'/dok'),
- 'fa' : ([u'documentation',
- u'template documentation',
- u'template doc',
- u'doc',
- u'توضیحات',
- u'زیرصفحه توضیحات'], u'/doc'),
+ 'en': ([u'documentation', u'template documentation', u'template doc',
+ u'doc', u'documentation, template'], u'/doc'),
+ 'es': ([u'documentación', u'documentación de plantilla'], u'/doc'),
+ 'eu': (u'txantiloi dokumentazioa', u'/dok'),
+ 'fa': ([u'documentation', u'template documentation', u'template doc',
+ u'doc', u'توضیحات', u'زیرصفحه توضیحات'], u'/doc'),
# fi: no idea how to handle this type of subpage at :Metasivu:
- 'fi' : (u'mallineohje', None),
- 'fr' : ([u'/documentation', u'documentation', u'doc_modèle',
- u'documentation modèle', u'documentation modèle compliqué',
- u'documentation modèle en sous-page',
- u'documentation modèle compliqué en sous-page',
- u'documentation modèle utilisant les parserfunctions en
sous-page',
+ 'fi': (u'mallineohje', None),
+ 'fr': ([u'/documentation', u'documentation', u'doc_modèle',
+ u'documentation modèle', u'documentation modèle compliqué',
+ u'documentation modèle en sous-page',
+ u'documentation modèle compliqué en sous-page',
+ u'documentation modèle utilisant les parserfunctions en sous-page',
],
- u'/Documentation'),
+ u'/Documentation'),
'hsb': ([u'dokumentacija', u'doc'], u'/Dokumentacija'),
- 'hu' : (u'sablondokumentáció', u'/doc'),
- 'id' : (u'template doc', u'/doc'),
+ 'hu': (u'sablondokumentáció', u'/doc'),
+ 'id': (u'template doc', u'/doc'),
'ilo': (u'documentation', u'/doc'),
- 'ja' : (u'documentation', u'/doc'),
- 'ka' : (u'თარგის ინფო', u'/ინფო'),
- 'ko' : (u'documentation', u'/설명문서'),
- 'ms' : (u'documentation', u'/doc'),
- 'no' : (u'dokumentasjon', u'/dok'),
- 'nn' : (u'dokumentasjon', u'/dok'),
- 'pl' : (u'dokumentacja', u'/opis'),
- 'pt' : ([u'documentação', u'/doc'], u'/doc'),
- 'ro' : (u'documentaţie', u'/doc'),
- 'ru' : (u'doc', u'/doc'),
+ 'ja': (u'documentation', u'/doc'),
+ 'ka': (u'თარგის ინფო', u'/ინფო'),
+ 'ko': (u'documentation', u'/설명문서'),
+ 'ms': (u'documentation', u'/doc'),
+ 'no': (u'dokumentasjon', u'/dok'),
+ 'nn': (u'dokumentasjon', u'/dok'),
+ 'pl': (u'dokumentacja', u'/opis'),
+ 'pt': ([u'documentação', u'/doc'], u'/doc'),
+ 'ro': (u'documentaţie', u'/doc'),
+ 'ru': (u'doc', u'/doc'),
'simple': ([u'documentation',
u'template documentation',
u'template doc',
u'doc',
u'documentation, template'], u'/doc'),
- 'sv' : (u'dokumentation', u'/dok'),
- 'uk' : ([u'документація',
- u'doc',
- u'documentation'], u'/Документація'),
- 'vi' : (u'documentation', u'/doc'),
- 'zh' : ([u'documentation', u'doc'], u'/doc'),
+ 'sk': (u'dokumentácia', u'/Dokumentácia'),
+ 'sv': (u'dokumentation', u'/dok'),
+ 'uk': ([u'документація', u'doc', u'documentation'], u'/Документація'),
+ 'vi': (u'documentation', u'/doc'),
+ 'zh': ([u'documentation', u'doc'], u'/doc'),
}
# A list of template names in different languages.
# Pages which contain these shouldn't be changed.
ignoreTemplates = {
'_default': [u'delete'],
- 'ar' : [u'قيد الاستخدام'],
- 'cs' : [u'Pracuje_se'],
- 'de' : [u'inuse', 'in use', u'in bearbeitung', u'inbearbeitung',
- u'löschen', u'sla',
- u'löschantrag', u'löschantragstext',
- u'falschschreibung',
- u'obsolete schreibung', 'veraltete schreibweise'],
- 'en' : [u'inuse', u'softredirect'],
- 'fa' : [u'در دست ویرایش ۲', u'حذف سریع'],
+ 'ar': [u'قيد الاستخدام'],
+ 'cs': [u'Pracuje_se'],
+ 'de': [u'inuse', 'in use', u'in bearbeitung', u'inbearbeitung',
+ u'löschen', u'sla',
+ u'löschantrag', u'löschantragstext',
+ u'falschschreibung',
+ u'obsolete schreibung', 'veraltete schreibweise'],
+ 'en': [u'inuse', u'softredirect'],
+ 'fa': [u'در دست ویرایش ۲', u'حذف سریع'],
'pdc': [u'lösche'],
- 'zh' : [u'inuse'],
+ 'zh': [u'inuse'],
}
+
class Global(object):
"""
Container class for global settings.
@@ -495,9 +493,9 @@
contentsondisk = config.interwiki_contents_on_disk
lacklanguage = None
minlinks = 0
- quiet = False
+ quiet = False
restoreAll = False
- async = False
+ async = False
summary = u''
def readOptions(self, arg):
@@ -509,9 +507,12 @@
elif arg.startswith('-hintfile'):
hintfilename = arg[10:]
if (hintfilename is None) or (hintfilename == ''):
- hintfilename = pywikibot.input(u'Please enter the hint
filename:')
+ hintfilename = pywikibot.input(
+ u'Please enter the hint filename:')
f = codecs.open(hintfilename, 'r', config.textfile_encoding)
- R = re.compile(ur'\[\[(.+?)(?:\]\]|\|)') # hint or title ends
either before | or before ]]
+
+ # hint or title ends either before | or before ]]
+ R = re.compile(ur'\[\[(.+?)(?:\]\]|\|)')
for pageTitle in R.findall(f.read()):
self.hints.append(pageTitle)
f.close()
@@ -569,7 +570,7 @@
elif arg.startswith('-neverlink:'):
self.neverlink += arg[11:].split(",")
elif arg.startswith('-ignore:'):
- self.ignore += [pywikibot.Page(None,p) for p in arg[8:].split(",")]
+ self.ignore += [pywikibot.Page(None, p) for p in
arg[8:].split(",")]
elif arg.startswith('-ignorefile:'):
ignorefile = arg[12:]
ignorePageGen = pagegenerators.TextfilePageGenerator(ignorefile)
@@ -599,7 +600,8 @@
self.async = True
elif arg.startswith('-summary'):
if len(arg) == 8:
- self.summary = pywikibot.input(u'What summary do you want to
use?')
+ self.summary = pywikibot.input(
+ u'What summary do you want to use?')
else:
self.summary = arg[9:]
elif arg.startswith('-lack:'):
@@ -613,6 +615,7 @@
return False
return True
+
class StoredPage(pywikibot.Page):
"""
Store the Page contents on disk to avoid sucking too much
@@ -629,20 +632,20 @@
SPstore = None
# attributes created by pywikibot.Page.__init__
- SPcopy = [ '_editrestriction',
- '_site',
- '_namespace',
- '_section',
- '_title',
- 'editRestriction',
- 'moveRestriction',
- '_permalink',
- '_userName',
- '_ipedit',
- '_editTime',
- '_startTime',
- '_revisionId',
- '_deletedRevs' ]
+ SPcopy = ['_editrestriction',
+ '_site',
+ '_namespace',
+ '_section',
+ '_title',
+ 'editRestriction',
+ 'moveRestriction',
+ '_permalink',
+ '_userName',
+ '_ipedit',
+ '_editTime',
+ '_startTime',
+ '_revisionId',
+ '_deletedRevs']
def SPdeleteStore():
if StoredPage.SPpath:
@@ -680,6 +683,7 @@
_contents = property(SPgetContents, SPsetContents, SPdelContents)
+
class PageTree(object):
"""
Structure to manipulate a set of pages.
@@ -758,6 +762,7 @@
for page in plist:
yield page
+
class Subject(object):
"""
Class to follow the progress of a single 'subject' (i.e. a page with
@@ -808,9 +813,8 @@
done <- NL(pending) U done
return done
-
- Subject objects only operate on pages that should have been preloaded
before.
- In fact, at any time:
+ Subject objects only operate on pages that should have been preloaded
+ before. In fact, at any time:
* todo contains new Pages that have not been loaded yet
* done contains Pages that have been loaded, and that have been treated.
* If batch preloadings are successful, Page._get() is never called from
@@ -841,7 +845,7 @@
# As we haven't yet found a page that links to the origin page, we
# start with an empty list for it.
if originPage:
- self.foundIn = {self.originPage:[]}
+ self.foundIn = {self.originPage: []}
else:
self.foundIn = {}
# This is a list of all pages that are currently scheduled for
@@ -869,7 +873,6 @@
for page in tree.filter(site):
if page.exists() and page.isDisambig():
return page
- return None
def getFoundNonDisambig(self, site):
"""
@@ -880,10 +883,9 @@
"""
for tree in [self.done, self.pending]:
for page in tree.filter(site):
- if page.exists() and not page.isDisambig() \
- and not page.isRedirectPage() and not
page.isCategoryRedirect():
+ if page.exists() and not page.isDisambig() and \
+ not page.isRedirectPage() and not page.isCategoryRedirect():
return page
- return None
def getFoundInCorrectNamespace(self, site):
"""
@@ -894,25 +896,36 @@
"""
for tree in [self.done, self.pending, self.todo]:
for page in tree.filter(site):
- # -hintsonly: before we have an origin page, any namespace
will do.
- if self.originPage and page.namespace() ==
self.originPage.namespace():
- if page.exists() and not page.isRedirectPage() and not
page.isCategoryRedirect():
+ # -hintsonly: before we have an origin page, any namespace will
+ # do.
+ if self.originPage and \
+ page.namespace() == self.originPage.namespace():
+ if page.exists() and not \
+ page.isRedirectPage() and not page.isCategoryRedirect():
return page
- return None
- def translate(self, hints = None, keephintedsites = False):
+ def translate(self, hints=None, keephintedsites=False):
"""Add the given translation hints to the todo list"""
if globalvar.same and self.originPage:
if hints:
- pages = titletranslate.translate(self.originPage, hints =
hints + ['all:'],
- auto = globalvar.auto, removebrackets =
globalvar.hintnobracket)
+ pages = titletranslate.translate(
+ self.originPage,
+ hints=hints + ['all:'],
+ auto=globalvar.auto,
+ removebrackets=globalvar.hintnobracket)
else:
- pages = titletranslate.translate(self.originPage, hints =
['all:'],
- auto = globalvar.auto, removebrackets =
globalvar.hintnobracket)
+ pages = titletranslate.translate(
+ self.originPage,
+ hints=['all:'],
+ auto=globalvar.auto,
+ removebrackets=globalvar.hintnobracket)
else:
- pages = titletranslate.translate(self.originPage, hints=hints,
- auto=globalvar.auto,
removebrackets=globalvar.hintnobracket,
- site=pywikibot.getSite())
+ pages = titletranslate.translate(
+ self.originPage,
+ hints=hints,
+ auto=globalvar.auto,
+ removebrackets=globalvar.hintnobracket,
+ site=pywikibot.getSite())
for page in pages:
if globalvar.contentsondisk:
page = StoredPage(page)
@@ -939,7 +952,8 @@
# Bug-check: Isn't there any work still in progress? We can't work on
# different sites at a time!
if len(self.pending) > 0:
- raise 'BUG: Can\'t start to work on %s; still working on %s' %
(site, self.pending)
+ raise "BUG: Can't start to work on %s; still working on %s" \
+ % (site, self.pending)
# Prepare a list of suitable pages
result = []
for page in self.todo.filter(site):
@@ -951,7 +965,7 @@
# If there are any, return them. Otherwise, nothing is in progress.
return result
- def makeForcedStop(self,counter):
+ def makeForcedStop(self, counter):
"""
Ends work on the page before the normal end.
"""
@@ -1011,26 +1025,33 @@
if linkedPage in self.foundIn:
# We have seen this page before, don't ask again.
return False
- elif self.originPage and self.originPage.namespace() !=
linkedPage.namespace():
+ elif self.originPage and \
+ self.originPage.namespace() != linkedPage.namespace():
# Allow for a mapping between different namespaces
- crossFrom =
self.originPage.site.family.crossnamespace.get(self.originPage.namespace(), {})
- crossTo = crossFrom.get(self.originPage.site.language(),
crossFrom.get('_default', {}))
- nsmatch = crossTo.get(linkedPage.site.language(),
crossTo.get('_default', []))
+ crossFrom = self.originPage.site.family.crossnamespace.get(
+ self.originPage.namespace(), {})
+ crossTo = crossFrom.get(self.originPage.site.language(),
+ crossFrom.get('_default', {}))
+ nsmatch = crossTo.get(linkedPage.site.language(),
+ crossTo.get('_default', []))
if linkedPage.namespace() in nsmatch:
return False
if globalvar.autonomous:
- pywikibot.output(u"NOTE: Ignoring link from page %s in
namespace %i to page %s in namespace %i."
- % (linkingPage, linkingPage.namespace(),
- linkedPage, linkedPage.namespace()))
+ pywikibot.output(
+u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i."
+ % (linkingPage, linkingPage.namespace(), linkedPage,
+ linkedPage.namespace()))
# Fill up foundIn, so that we will not write this notice
self.foundIn[linkedPage] = [linkingPage]
return True
else:
preferredPage =
self.getFoundInCorrectNamespace(linkedPage.site)
if preferredPage:
- pywikibot.output(u"NOTE: Ignoring link from page %s in
namespace %i to page %s in namespace %i because page %s in the correct
namespace has already been found."
- % (linkingPage, linkingPage.namespace(),
linkedPage,
- linkedPage.namespace(), preferredPage))
+ pywikibot.output(
+u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i "
+u"because page %s in the correct namespace has already been found."
+ % (linkingPage, linkingPage.namespace(), linkedPage,
+ linkedPage.namespace(), preferredPage))
return True
else:
choice = pywikibot.inputChoice(
@@ -1045,13 +1066,17 @@
if choice == 'g':
self.makeForcedStop(counter)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative
for language %s, not using a language code:'
- %
linkedPage.site.language())
+ newHint = pywikibot.input(
+ u'Give the alternative for language %s, not '
+ u'using a language code:'
+ % linkedPage.site.language())
if newHint:
- alternativePage =
pywikibot.Page(linkedPage.site, newHint)
+ alternativePage = pywikibot.Page(
+ linkedPage.site, newHint)
if alternativePage:
# add the page that was entered by the user
- self.addIfNew(alternativePage, counter,
None)
+ self.addIfNew(alternativePage, counter,
+ None)
else:
pywikibot.output(
u"NOTE: ignoring %s and its interwiki links"
@@ -1063,14 +1088,18 @@
return False
def wiktionaryMismatch(self, page):
- if self.originPage and globalvar.same=='wiktionary':
+ if self.originPage and globalvar.same == 'wiktionary':
if page.title().lower() != self.originPage.title().lower():
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary
mode" % (page, self.originPage))
- return True
- elif page.title() != self.originPage.title() and
self.originPage.site.nocapitalize and page.site.nocapitalize:
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode
because both languages are uncapitalized."
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode"
% (page, self.originPage))
return True
+ elif page.title() != self.originPage.title() and \
+ self.originPage.site.nocapitalize and page.site.nocapitalize:
+ pywikibot.output(
+ u"NOTE: Ignoring %s for %s in wiktionary mode because both
"
+ u"languages are uncapitalized."
+ % (page, self.originPage))
+ return True
return False
def disambigMismatch(self, page, counter):
@@ -1088,15 +1117,17 @@
chosen to use instead of the given page.
"""
if not self.originPage:
- return (False, None) # any page matches until we have an origin
page
+ return (False, None) # any page matches til we have an origin page
if globalvar.autonomous:
if self.originPage.isDisambig() and not page.isDisambig():
- pywikibot.output(u"NOTE: Ignoring link from disambiguation
page %s to non-disambiguation %s"
- % (self.originPage, page))
+ pywikibot.output(
+ u"NOTE: Ignoring link from disambiguation page %s to "
+ u"non-disambiguation %s" % (self.originPage, page))
return (True, None)
elif not self.originPage.isDisambig() and page.isDisambig():
- pywikibot.output(u"NOTE: Ignoring link from non-disambiguation
page %s to disambiguation %s"
- % (self.originPage, page))
+ pywikibot.output(
+ u"NOTE: Ignoring link from non-disambiguation page %s to "
+ u"disambiguation %s" % (self.originPage, page))
return (True, None)
else:
choice = 'y'
@@ -1104,32 +1135,39 @@
disambig = self.getFoundDisambig(page.site)
if disambig:
pywikibot.output(
- u"NOTE: Ignoring non-disambiguation page %s for %s
because disambiguation page %s has already been found."
+ u"NOTE: Ignoring non-disambiguation page %s for %s "
+ u"because disambiguation page %s has already been "
+ u"found."
% (page, self.originPage, disambig))
return (True, None)
else:
choice = pywikibot.inputChoice(
- u'WARNING: %s is a disambiguation page, but %s
doesn\'t seem to be one. Follow it anyway?'
+ u"WARNING: %s is a disambiguation page, but %s doesn't
"
+ u"seem to be one. Follow it anyway?"
% (self.originPage, page),
['Yes', 'No', 'Add an alternative', 'Give up'],
['y', 'n', 'a', 'g'])
elif not self.originPage.isDisambig() and page.isDisambig():
nondisambig = self.getFoundNonDisambig(page.site)
if nondisambig:
- pywikibot.output(u"NOTE: Ignoring disambiguation page %s
for %s because non-disambiguation page %s has already been found."
- % (page, self.originPage, nondisambig))
+ pywikibot.output(
+ u"NOTE: Ignoring disambiguation page %s for %s because
"
+ u"non-disambiguation page %s has already been found."
+ % (page, self.originPage, nondisambig))
return (True, None)
else:
choice = pywikibot.inputChoice(
- u'WARNING: %s doesn\'t seem to be a disambiguation
page, but %s is one. Follow it anyway?'
+ u'WARNING: %s doesn\'t seem to be a disambiguation '
+ u'page, but %s is one. Follow it anyway?'
% (self.originPage, page),
['Yes', 'No', 'Add an alternative', 'Give up'],
['y', 'n', 'a', 'g'])
if choice == 'n':
return (True, None)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative for language
%s, not using a language code:'
- % page.site.language())
+ newHint = pywikibot.input(
+ u'Give the alternative for language %s, not using a '
+ u'language code:' % page.site.language())
alternativePage = pywikibot.Page(page.site, newHint)
return (True, alternativePage)
elif choice == 'g':
@@ -1164,7 +1202,8 @@
return
if (self.untranslated or globalvar.askhints) and not self.hintsAsked \
and self.originPage and self.originPage.exists() \
- and not self.originPage.isRedirectPage() and not
self.originPage.isCategoryRedirect():
+ and not self.originPage.isRedirectPage() and \
+ not self.originPage.isCategoryRedirect():
# Only once!
self.hintsAsked = True
if globalvar.untranslated:
@@ -1174,17 +1213,24 @@
pywikibot.output(self.originPage.get()[:t])
# loop
while True:
- newhint = pywikibot.input(u'Give a hint (? to see
pagetext):')
+ newhint = pywikibot.input(
+ u'Give a hint (? to see pagetext):')
if newhint == '?':
t += globalvar.showtextlinkadd
pywikibot.output(self.originPage.get()[:t])
elif newhint and not ':' in newhint:
- pywikibot.output(u'Please enter a hint in the format
language:pagename or type nothing if you do not have a hint.')
+ pywikibot.output(
+ u'Please enter a hint in the format '
+ u'language:pagename or type nothing if you do not '
+ u'have a hint.')
elif not newhint:
break
else:
- pages = titletranslate.translate(self.originPage,
hints=[newhint],
- auto = globalvar.auto,
removebrackets=globalvar.hintnobracket)
+ pages = titletranslate.translate(
+ self.originPage,
+ hints=[newhint],
+ auto=globalvar.auto,
+ removebrackets=globalvar.hintnobracket)
for page in pages:
self.addIfNew(page, counter, None)
if globalvar.hintsareright:
@@ -1197,9 +1243,9 @@
In other words, all the pages in self.pending have already
been preloaded.
- The only argument is an instance
- of a counter class, that has methods minus() and plus() to keep
- counts of the total work todo.
+ The only argument is an instance of a counter class, that has methods
+ minus() and plus() to keep counts of the total work todo.
+
"""
# Loop over all the pages that should have been taken care of
for page in self.pending:
@@ -1211,9 +1257,11 @@
dictName, year = page.autoFormat()
if dictName is not None:
if self.originPage:
- pywikibot.output(u'WARNING: %s:%s relates to %s:%s,
which is an auto entry %s(%s)'
- % (self.originPage.site.language(),
self.originPage,
- page.site.language(), page,
dictName, year))
+ pywikibot.output(
+ u'WARNING: %s:%s relates to %s:%s, which is an '
+ u'auto entry %s(%s)'
+ % (self.originPage.site.language(),
self.originPage,
+ page.site.language(), page, dictName, year))
# Abort processing if the bot is running in autonomous
mode.
if globalvar.autonomous:
@@ -1231,12 +1279,13 @@
pywikibot.output(u"NOTE: %s does not exist. Skipping."
% page)
if page == self.originPage:
- # The page we are working on is the page that does not
exist.
- # No use in doing any work on it in that case.
+ # The page we are working on is the page that does not
+ # exist. No use in doing any work on it in that case.
for site, count in self.todo.siteCounts():
counter.minus(site, count)
self.todo = PageTree()
- # In some rare cases it might be we already did check some
'automatic' links
+ # In some rare cases it might be we already did check some
+ # 'automatic' links
self.done = PageTree()
continue
@@ -1253,7 +1302,8 @@
pywikibot.output(u"NOTE: %s is %sredirect to %s"
% (page, redir, redirectTargetPage))
if self.originPage is None or page == self.originPage:
- # the 1st existig page becomes the origin page, if none
was supplied
+                    # the 1st existing page becomes the origin page, if none was
+                    # supplied
if globalvar.initialredirect:
if globalvar.contentsondisk:
redirectTargetPage = StoredPage(redirectTargetPage)
@@ -1264,8 +1314,8 @@
self.todo.add(redirectTargetPage)
counter.plus(redirectTargetPage.site)
else:
- # This is a redirect page to the origin. We don't need
to
- # follow the redirection.
+ # This is a redirect page to the origin. We don't need
+ # to follow the redirection.
# In this case we can also stop all hints!
for site, count in self.todo.siteCounts():
counter.minus(site, count)
@@ -1309,7 +1359,8 @@
            # Page exists, isn't a redirect, and is a plain link (no section)
if self.originPage is None:
- # the 1st existig page becomes the origin page, if none was
supplied
+            # the 1st existing page becomes the origin page, if none was
+            # supplied
self.originPage = page
try:
iw = page.langlinks()
@@ -1331,7 +1382,8 @@
duplicate = None
for p in self.done.filter(page.site):
- if p != page and p.exists() and not p.isRedirectPage() and not
p.isCategoryRedirect():
+ if p != page and p.exists() and \
+ not p.isRedirectPage() and not p.isCategoryRedirect():
duplicate = p
break
@@ -1341,7 +1393,8 @@
# Ignore the interwiki links.
iw = ()
if globalvar.lacklanguage:
- if globalvar.lacklanguage in [link.site.language() for
link in iw]:
+ if globalvar.lacklanguage in [link.site.language()
+ for link in iw]:
iw = ()
self.workonme = False
if len(iw) < globalvar.minlinks:
@@ -1349,8 +1402,9 @@
self.workonme = False
elif globalvar.autonomous and duplicate and not skip:
- pywikibot.output(u"Stopping work on %s because duplicate
pages"\
- " %s and %s are found" % (self.originPage, duplicate,
page))
+ pywikibot.output(u"Stopping work on %s because duplicate pages"
+ " %s and %s are found"
+ % (self.originPage, duplicate, page))
self.makeForcedStop(counter)
try:
f = codecs.open(
@@ -1652,10 +1706,10 @@
break
else:
for (site, page) in new.iteritems():
- # edit restriction for some templates on zh-wiki where
interlanguage keys are included
- # by /doc subpage
- smallWikiAllowed = not (page.site.sitename() == 'wikipedia:zh'
and
- page.namespace() == 10 and
+ # edit restriction for some templates on zh-wiki where
+ # interlanguage keys are included by /doc subpage
+ smallWikiAllowed = not (page.site.sitename() == 'wikipedia:zh'
+ and page.namespace() == 10 and
u'Country data' in
page.title(withNamespace=False))
# edit restriction on is-wiki
# http://is.wikipedia.org/wiki/Wikipediaspjall:V%C3%A9lmenni
@@ -1769,7 +1823,7 @@
# clone original newPages dictionary, so that we can modify it to the
# local page's needs
- new = dict(newPages)
+ new = newPages.copy()
interwikis = [pywikibot.Page(l) for l in page.iterlanglinks()]
# remove interwiki links to ignore
@@ -2007,10 +2061,11 @@
% (page.site.family.name,
page, expectedPage, linkedPage))
except KeyError:
- pywikibot.output(
- u"WARNING: %s: %s does not link to %s"
- % (page.site.family.name,
- page, expectedPage))
+ if not expectedPage.site.is_data_repository():
+ pywikibot.output(
+ u"WARNING: %s: %s does not link to %s"
+ % (page.site.family.name,
+ page, expectedPage))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
@@ -2429,9 +2484,9 @@
elif globalvar.summary:
globalvar.summary += u'; '
- # ensure that we don't try to change main page
site = pywikibot.getSite()
site.login()
+ # ensure that we don't try to change main page
try:
mainpagename = site.mediawiki_message('mainpage')
globalvar.skip.add(pywikibot.Page(site, mainpagename))
_______________________________________________
Pywikipedia-svn mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-svn