jenkins-bot has submitted this change and it was merged.

Change subject: Remove every implicit use of textlib
......................................................................


Remove every implicit use of textlib

follows:
* Iabede1697aa1f5c8e3039b2b404029bc6cfd55ff
* I6cc16f23fe9fbb851b26725557525619c3051a26

Change-Id: I0437028a32da219205fc30aec3921474132e1cc8
---
M scripts/add_text.py
M scripts/catall.py
M scripts/category.py
M scripts/cosmetic_changes.py
M scripts/featured.py
M scripts/flickrripper.py
M scripts/harvest_template.py
M scripts/imagerecat.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/makecat.py
M scripts/noreferences.py
M scripts/reflinks.py
M scripts/replace.py
M scripts/weblinkchecker.py
15 files changed, 155 insertions(+), 166 deletions(-)

Approvals:
  John Vandenberg: Looks good to me, but someone else must approve
  XZise: Looks good to me, but someone else must approve
  Merlijn van Deen: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/scripts/add_text.py b/scripts/add_text.py
index ece394b..acbf321 100644
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -77,9 +77,7 @@
 import time
 
 import pywikibot
-from pywikibot import config
-from pywikibot import i18n
-from pywikibot import pagegenerators
+from pywikibot import config, i18n, pagegenerators, textlib
 
 docuReplacements = {
     '&params;': pagegenerators.parameterHelp,
@@ -173,23 +171,23 @@
         addText = addText.replace('\\n', config.line_separator)
         if (reorderEnabled):
             # Getting the categories
-            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
+            categoriesInside = textlib.getCategoryLinks(newtext, site)
             # Deleting the categories
-            newtext = pywikibot.removeCategoryLinks(newtext, site)
+            newtext = textlib.removeCategoryLinks(newtext, site)
             # Getting the interwiki
-            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
+            interwikiInside = textlib.getLanguageLinks(newtext, site)
             # Removing the interwiki
-            newtext = pywikibot.removeLanguageLinks(newtext, site)
+            newtext = textlib.removeLanguageLinks(newtext, site)
 
             # Adding the text
             newtext += u"%s%s" % (config.line_separator, addText)
             # Reputting the categories
-            newtext = pywikibot.replaceCategoryLinks(newtext,
-                                                     categoriesInside, site,
-                                                     True)
+            newtext = textlib.replaceCategoryLinks(newtext,
+                                                   categoriesInside, site,
+                                                   True)
             # Dealing the stars' issue
             allstars = []
-            starstext = pywikibot.removeDisabledParts(text)
+            starstext = textlib.removeDisabledParts(text)
             for star in starsList:
                 regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                    % star, re.I)
@@ -203,8 +201,8 @@
                 for element in allstars:
                     newtext += '%s%s' % (element.strip(), config.LS)
             # Adding the interwiki
-            newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside,
-                                                     site)
+            newtext = textlib.replaceLanguageLinks(newtext, interwikiInside,
+                                                   site)
         else:
             newtext += u"%s%s" % (config.line_separator, addText)
     else:
diff --git a/scripts/catall.py b/scripts/catall.py
index 1e61f11..780e52d 100755
--- a/scripts/catall.py
+++ b/scripts/catall.py
@@ -23,7 +23,7 @@
 
 import sys
 import pywikibot
-from pywikibot import i18n
+from pywikibot import i18n, textlib
 
 
 def choosecats(pagetext):
@@ -69,8 +69,8 @@
     for p in list:
         cattitle = "%s:%s" % (site.category_namespace(), p)
         pllist.append(pywikibot.Page(site, cattitle))
-    page.put_async(pywikibot.replaceCategoryLinks(page.get(), pllist,
-                                                  site=page.site),
+    page.put_async(textlib.replaceCategoryLinks(page.get(), pllist,
+                                                site=page.site),
                    comment=i18n.twtranslate(site.code, 'catall-changing'))
 
 
diff --git a/scripts/category.py b/scripts/category.py
index d5f5492..c511759 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -105,7 +105,7 @@
 import bz2
 import pywikibot
 from pywikibot import config, pagegenerators
-from pywikibot import i18n
+from pywikibot import i18n, textlib
 from pywikibot import deprecate_arg, deprecated
 
 # This is required for the text that is shown when you run this script
@@ -397,7 +397,7 @@
                 catpl = self.sorted_by_last_name(catpl, page)
             pywikibot.output(u'Adding %s' % catpl.title(asLink=True))
             cats.append(catpl)
-            text = pywikibot.replaceCategoryLinks(text, cats, site=page.site)
+            text = textlib.replaceCategoryLinks(text, cats, site=page.site)
             if not self.save(text, page, newcatTitle, old_text=old_text):
                 pywikibot.output(u'Page %s not saved.'
                                  % page.title(asLink=True))
diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py
index a69c35d..aad7bf8 100755
--- a/scripts/cosmetic_changes.py
+++ b/scripts/cosmetic_changes.py
@@ -74,7 +74,7 @@
 from distutils.version import LooseVersion as LV
 import pywikibot
 import isbn
-from pywikibot import config, i18n, pagegenerators, Bot
+from pywikibot import config, i18n, textlib, pagegenerators, Bot
 from pywikibot.page import url2unicode
 
 warning = """
@@ -259,7 +259,7 @@
         if not self.template and '{{Personendaten' not in text and \
            '{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and \
            self.site.code not in ('et', 'it', 'bg', 'ru'):
-            categories = pywikibot.getCategoryLinks(text, site=self.site)
+            categories = textlib.getCategoryLinks(text, site=self.site)
 
         if not self.talkpage:  # and pywikibot.calledModuleName() <> 
'interwiki':
             subpage = False
@@ -272,13 +272,13 @@
                     pass
                 if loc is not None and loc in self.title:
                     subpage = True
-            interwikiLinks = pywikibot.getLanguageLinks(
+            interwikiLinks = textlib.getLanguageLinks(
                 text, insite=self.site, template_subpage=subpage)
 
             # Removing the interwiki
-            text = pywikibot.removeLanguageLinks(text, site=self.site)
+            text = textlib.removeLanguageLinks(text, site=self.site)
             # Removing the stars' issue
-            starstext = pywikibot.removeDisabledParts(text)
+            starstext = textlib.removeDisabledParts(text)
             for star in starsList:
                 regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                    % star, re.I)
@@ -297,8 +297,8 @@
             #       if re.search(u"(.+?)\|(.{,1}?)",name.title()) or 
name.title()==name.title().split(":")[0]+title:
             #            categories.remove(name)
             #            categories.insert(0, name)
-            text = pywikibot.replaceCategoryLinks(text, categories,
-                                                  site=self.site)
+            text = textlib.replaceCategoryLinks(text, categories,
+                                                site=self.site)
         # Adding stars templates
         if allstars:
             text = text.strip() + self.site.family.interwiki_text_separator
@@ -308,10 +308,10 @@
                 pywikibot.log(u'%s' % element.strip())
         # Adding the interwiki
         if interwikiLinks:
-            text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
-                                                  site=self.site,
-                                                  template=self.template,
-                                                  template_subpage=subpage)
+            text = textlib.replaceLanguageLinks(text, interwikiLinks,
+                                                site=self.site,
+                                                template=self.template,
+                                                template_subpage=subpage)
         return text
 
     def translateAndCapitalizeNamespaces(self, text):
@@ -353,7 +353,7 @@
                 namespaces[i] = item
             namespaces.append(thisNs[0].lower() + thisNs[1:])
             if thisNs and namespaces:
-                text = pywikibot.replaceExcept(
+                text = textlib.replaceExcept(
                     text,
                     r'\[\[\s*(%s) *:(?P<nameAndLabel>.*?)\]\]'
                     % '|'.join(namespaces),
@@ -375,7 +375,7 @@
                 aliases = self.site.getmagicwords(magicWord)
                 if not aliases:
                     continue
-                text = pywikibot.replaceExcept(
+                text = textlib.replaceExcept(
                     text,
                     r'\[\[(?P<left>.+?:.+?\..+?\|) *(' + '|'.join(aliases) +
                     ') *(?P<right>(\|.*?)?\]\])',
@@ -503,9 +503,9 @@
             
r'(?P<newline>[\n]*)\[\[(?P<titleWithSection>[^\]\|]+)(\|(?P<label>[^\]\|]*))?\]\](?P<linktrail>'
 +
             self.site.linktrail() + ')')
 
-        text = pywikibot.replaceExcept(text, linkR, handleOneLink,
-                                       ['comment', 'math', 'nowiki', 'pre',
-                                        'startspace'])
+        text = textlib.replaceExcept(text, linkR, handleOneLink,
+                                     ['comment', 'math', 'nowiki', 'pre',
+                                      'startspace'])
         return text
 
     def resolveHtmlEntities(self, text):
@@ -532,8 +532,8 @@
         return text
 
     def validXhtml(self, text):
-        text = pywikibot.replaceExcept(text, r'(?i)<br[ /]*>', r'<br />',
-                                       ['comment', 'math', 'nowiki', 'pre'])
+        text = textlib.replaceExcept(text, r'(?i)<br[ /]*>', r'<br />',
+                                     ['comment', 'math', 'nowiki', 'pre'])
         return text
 
     def removeUselessSpaces(self, text):
@@ -541,8 +541,8 @@
         spaceAtLineEndR = re.compile(' $')
         exceptions = ['comment', 'math', 'nowiki', 'pre', 'startspace', 
'table',
                       'template']
-        text = pywikibot.replaceExcept(text, multipleSpacesR, ' ', exceptions)
-        text = pywikibot.replaceExcept(text, spaceAtLineEndR, '', exceptions)
+        text = textlib.replaceExcept(text, multipleSpacesR, ' ', exceptions)
+        text = textlib.replaceExcept(text, spaceAtLineEndR, '', exceptions)
         return text
 
     def removeNonBreakingSpaceBeforePercent(self, text):
@@ -552,8 +552,8 @@
         manually.
 
         """
-        text = pywikibot.replaceExcept(text, r'(\d)&nbsp;%', r'\1 %',
-                                       ['timeline'])
+        text = textlib.replaceExcept(text, r'(\d)&nbsp;%', r'\1 %',
+                                     ['timeline'])
         return text
 
     def cleanUpSectionHeaders(self, text):
@@ -566,7 +566,7 @@
         German Wikipedia. It might be that it is not wanted on other wikis.
         If there are any complaints, please file a bug report.
         """
-        return pywikibot.replaceExcept(
+        return textlib.replaceExcept(
             text,
             r'(?m)^(={1,7}) *(?P<title>[^=]+?) *\1 *\r?\n',
             r'\1 \g<title> \1%s' % config.LS,
@@ -584,7 +584,7 @@
         if not self.template:
             exceptions = ['comment', 'math', 'nowiki', 'pre', 'source', 
'template',
                           'timeline', self.site.redirectRegex()]
-            text = pywikibot.replaceExcept(
+            text = textlib.replaceExcept(
                 text,
                 
r'(?m)^(?P<bullet>[:;]*(\*+|#+)[:;\*#]*)(?P<char>[^\s\*#:;].+?)',
                 '\g<bullet> \g<char>',
@@ -605,7 +605,7 @@
                     new = '{{%s}}' % new
                 if not self.site.nocapitalize:
                     old = '[' + old[0].upper() + old[0].lower() + ']' + old[1:]
-                text = pywikibot.replaceExcept(
+                text = textlib.replaceExcept(
                     text,
                     r'\{\{([mM][sS][gG]:)?%s(?P<parameters>\|[^}]+|)}}' % old,
                     new, exceptions)
@@ -618,30 +618,30 @@
         # link to the wiki working on
         ## TODO: disable this for difflinks and titled links
         ## 
https://de.wikipedia.org/w/index.php?title=Wikipedia%3aVandalismusmeldung&diff=103109563&oldid=103109271
-##        text = pywikibot.replaceExcept(text,
-##                                       
r'\[https?://%s\.%s\.org/wiki/(?P<link>\S+)\s+(?P<title>.+?)\s?\]'
-##                                       % (self.site.code, 
self.site.family.name),
-##                                       r'[[\g<link>|\g<title>]]', exceptions)
+##        text = textlib.replaceExcept(text,
+##                                     
r'\[https?://%s\.%s\.org/wiki/(?P<link>\S+)\s+(?P<title>.+?)\s?\]'
+##                                     % (self.site.code, 
self.site.family.name),
+##                                     r'[[\g<link>|\g<title>]]', exceptions)
         # external link in double brackets
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'\[\[(?P<url>https?://[^\]]+?)\]\]',
             r'[\g<url>]', exceptions)
         # external link starting with double bracket
-        text = pywikibot.replaceExcept(text,
-                                       r'\[\[(?P<url>https?://.+?)\]',
-                                       r'[\g<url>]', exceptions)
+        text = textlib.replaceExcept(text,
+                                     r'\[\[(?P<url>https?://.+?)\]',
+                                     r'[\g<url>]', exceptions)
         # external link and description separated by a dash, with
         # whitespace in front of the dash, so that it is clear that
         # the dash is not a legitimate part of the URL.
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'\[(?P<url>https?://[^\|\] \r\n]+?) +\| *(?P<label>[^\|\]]+?)\]',
             r'[\g<url> \g<label>]', exceptions)
         # dash in external link, where the correct end of the URL can
         # be detected from the file extension. It is very unlikely that
         # this will cause mistakes.
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'\[(?P<url>https?://[^\|\] 
]+?(\.pdf|\.html|\.htm|\.php|\.asp|\.aspx|\.jsp)) *\| *(?P<label>[^\|\]]+?)\]',
             r'[\g<url> \g<label>]', exceptions)
@@ -652,26 +652,26 @@
         # Keep in mind that MediaWiki automatically converts <br> to <br />
         exceptions = ['nowiki', 'comment', 'math', 'pre', 'source',
                       'startspace']
-        text = pywikibot.replaceExcept(text, r'(?i)<b>(.*?)</b>', r"'''\1'''",
-                                       exceptions)
-        text = pywikibot.replaceExcept(text, r'(?i)<strong>(.*?)</strong>',
-                                       r"'''\1'''", exceptions)
-        text = pywikibot.replaceExcept(text, r'(?i)<i>(.*?)</i>', r"''\1''",
-                                       exceptions)
-        text = pywikibot.replaceExcept(text, r'(?i)<em>(.*?)</em>', r"''\1''",
-                                       exceptions)
+        text = textlib.replaceExcept(text, r'(?i)<b>(.*?)</b>', r"'''\1'''",
+                                     exceptions)
+        text = textlib.replaceExcept(text, r'(?i)<strong>(.*?)</strong>',
+                                     r"'''\1'''", exceptions)
+        text = textlib.replaceExcept(text, r'(?i)<i>(.*?)</i>', r"''\1''",
+                                     exceptions)
+        text = textlib.replaceExcept(text, r'(?i)<em>(.*?)</em>', r"''\1''",
+                                     exceptions)
         # horizontal line without attributes in a single line
-        text = pywikibot.replaceExcept(text, r'(?i)([\r\n])<hr[ /]*>([\r\n])',
-                                       r'\1----\2', exceptions)
+        text = textlib.replaceExcept(text, r'(?i)([\r\n])<hr[ /]*>([\r\n])',
+                                     r'\1----\2', exceptions)
         # horizontal line with attributes; can't be done with wiki syntax
         # so we only make it XHTML compliant
-        text = pywikibot.replaceExcept(text, r'(?i)<hr ([^>/]+?)>',
-                                       r'<hr \1 />',
-                                       exceptions)
+        text = textlib.replaceExcept(text, r'(?i)<hr ([^>/]+?)>',
+                                     r'<hr \1 />',
+                                     exceptions)
         # a header where only spaces are in the same line
         for level in range(1, 7):
             equals = '\\1%s \\2 %s\\3' % ("=" * level, "=" * level)
-            text = pywikibot.replaceExcept(
+            text = textlib.replaceExcept(
                 text,
                 r'(?i)([\r\n]) *<h%d> *([^<]+?) *</h%d> *([\r\n])'
                 % (level, level),
@@ -688,12 +688,12 @@
         # it should be name = " or name=" NOT name   ="
         text = re.sub(r'(?i)<ref +name(= *| *=)"', r'<ref name="', text)
         #remove empty <ref/>-tag
-        text = pywikibot.replaceExcept(text,
-                                       r'(?i)(<ref\s*/>|<ref *>\s*</ref>)',
-                                       r'', exceptions)
-        text = pywikibot.replaceExcept(text,
-                                       r'(?i)<ref\s+([^>]+?)\s*>\s*</ref>',
-                                       r'<ref \1/>', exceptions)
+        text = textlib.replaceExcept(text,
+                                     r'(?i)(<ref\s*/>|<ref *>\s*</ref>)',
+                                     r'', exceptions)
+        text = textlib.replaceExcept(text,
+                                     r'(?i)<ref\s+([^>]+?)\s*>\s*</ref>',
+                                     r'<ref \1/>', exceptions)
         return text
 
     def fixStyle(self, text):
@@ -701,30 +701,30 @@
                       'startspace']
         # convert prettytable to wikitable class
         if self.site.code in ('de', 'en'):
-            text = pywikibot.replaceExcept(text,
-                                           
r'(class="[^"]*)prettytable([^"]*")',
-                                           r'\1wikitable\2', exceptions)
+            text = textlib.replaceExcept(text,
+                                         r'(class="[^"]*)prettytable([^"]*")',
+                                         r'\1wikitable\2', exceptions)
         return text
 
     def fixTypo(self, text):
         exceptions = ['nowiki', 'comment', 'math', 'pre', 'source',
                       'startspace', 'gallery', 'hyperlink', 'interwiki', 
'link']
         # change <number> ccm -> <number> cm³
-        text = pywikibot.replaceExcept(text, r'(\d)\s*&nbsp;ccm',
-                                       r'\1&nbsp;' + u'cm³', exceptions)
-        text = pywikibot.replaceExcept(text,
-                                       r'(\d)\s*ccm', r'\1&nbsp;' + u'cm³',
-                                       exceptions)
+        text = textlib.replaceExcept(text, r'(\d)\s*&nbsp;ccm',
+                                     r'\1&nbsp;' + u'cm³', exceptions)
+        text = textlib.replaceExcept(text,
+                                     r'(\d)\s*ccm', r'\1&nbsp;' + u'cm³',
+                                     exceptions)
         # Solve wrong Nº sign with °C or °F
         # additional exception requested on fr-wiki for this stuff
         pattern = re.compile(u'«.*?»', re.UNICODE)
         exceptions.append(pattern)
-        text = pywikibot.replaceExcept(text, r'(\d)\s*&nbsp;' + u'[º°]([CF])',
-                                       r'\1&nbsp;' + u'°' + r'\2', exceptions)
-        text = pywikibot.replaceExcept(text, r'(\d)\s*' + u'[º°]([CF])',
-                                       r'\1&nbsp;' + u'°' + r'\2', exceptions)
-        text = pywikibot.replaceExcept(text, u'º([CF])', u'°' + r'\1',
-                                       exceptions)
+        text = textlib.replaceExcept(text, r'(\d)\s*&nbsp;' + u'[º°]([CF])',
+                                     r'\1&nbsp;' + u'°' + r'\2', exceptions)
+        text = textlib.replaceExcept(text, r'(\d)\s*' + u'[º°]([CF])',
+                                     r'\1&nbsp;' + u'°' + r'\2', exceptions)
+        text = textlib.replaceExcept(text, u'º([CF])', u'°' + r'\1',
+                                     exceptions)
         return text
 
     def fixArabicLetters(self, text):
@@ -762,20 +762,20 @@
         exceptions.append(re.compile(u"[^%(fa)s] *?\"*? *?, *?[^%(fa)s]"
                                      % {'fa': faChrs}))
         exceptions.append(pattern)
-        text = pywikibot.replaceExcept(text, u',', u'،', exceptions)
+        text = textlib.replaceExcept(text, u',', u'،', exceptions)
         if self.site.code == 'ckb':
-            text = pywikibot.replaceExcept(text,
-                                           u'\u0647([.\u060c_<\\]\\s])',
-                                           u'\u06d5\\1', exceptions)
-            text = pywikibot.replaceExcept(text, u'ه‌', u'ە', exceptions)
-            text = pywikibot.replaceExcept(text, u'ه', u'ھ', exceptions)
-        text = pywikibot.replaceExcept(text, u'ك', u'ک', exceptions)
-        text = pywikibot.replaceExcept(text, u'[ىي]', u'ی', exceptions)
+            text = textlib.replaceExcept(text,
+                                         u'\u0647([.\u060c_<\\]\\s])',
+                                         u'\u06d5\\1', exceptions)
+            text = textlib.replaceExcept(text, u'ه‌', u'ە', exceptions)
+            text = textlib.replaceExcept(text, u'ه', u'ھ', exceptions)
+        text = textlib.replaceExcept(text, u'ك', u'ک', exceptions)
+        text = textlib.replaceExcept(text, u'[ىي]', u'ی', exceptions)
         return text
         # replace persian/arabic digits
         # deactivated due to bug 55185
         for i in range(0, 10):
-            text = pywikibot.replaceExcept(text, old[i], new[i], exceptions)
+            text = textlib.replaceExcept(text, old[i], new[i], exceptions)
         # do not change digits in class, style and table params
         pattern = re.compile(u'\w+=(".+?"|\d+)', re.UNICODE)
         exceptions.append(pattern)
@@ -785,7 +785,7 @@
         exceptions.append('table')  # exclude tables for now
         # replace digits
         for i in range(0, 10):
-            text = pywikibot.replaceExcept(text, str(i), new[i], exceptions)
+            text = textlib.replaceExcept(text, str(i), new[i], exceptions)
         return text
 
     # Retrieved from 
"https://commons.wikimedia.org/wiki/Commons:Tools/pywiki_file_description_cleanup";
@@ -793,46 +793,46 @@
         # section headers to {{int:}} versions
         exceptions = ['comment', 'includeonly', 'math', 'noinclude', 'nowiki',
                       'pre', 'source', 'ref', 'timeline']
-        text = pywikibot.replaceExcept(text,
-                                       r"([\r\n]|^)\=\= *Summary *\=\=",
-                                       r"\1== {{int:filedesc}} ==",
-                                       exceptions, True)
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(text,
+                                     r"([\r\n]|^)\=\= *Summary *\=\=",
+                                     r"\1== {{int:filedesc}} ==",
+                                     exceptions, True)
+        text = textlib.replaceExcept(
             text,
             r"([\r\n])\=\= *\[\[Commons:Copyright tags\|Licensing\]\]: *\=\=",
             r"\1== {{int:license-header}} ==", exceptions, True)
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r"([\r\n])\=\= *(Licensing|License information|{{int:license}}) 
*\=\=",
             r"\1== {{int:license-header}} ==", exceptions, True)
 
         # frequent field values to {{int:}} versions
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'([\r\n]\|[Ss]ource *\= *)(?:[Oo]wn work by uploader|[Oo]wn 
work|[Ee]igene [Aa]rbeit) *([\r\n])',
             r'\1{{own}}\2', exceptions, True)
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'(\| *Permission *\=) *(?:[Ss]ee below|[Ss]iehe unten) *([\r\n])',
             r'\1\2', exceptions, True)
 
         # added to transwikied pages
-        text = pywikibot.replaceExcept(text, r'__NOTOC__', '', exceptions, 
True)
+        text = textlib.replaceExcept(text, r'__NOTOC__', '', exceptions, True)
 
         # tracker element for js upload form
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'<!-- *{{ImageUpload\|(?:full|basic)}} *-->',
             '', exceptions[1:], True)
-        text = pywikibot.replaceExcept(text, 
r'{{ImageUpload\|(?:basic|full)}}',
-                                       '', exceptions, True)
+        text = textlib.replaceExcept(text, r'{{ImageUpload\|(?:basic|full)}}',
+                                     '', exceptions, True)
 
         # duplicated section headers
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'([\r\n]|^)\=\= *{{int:filedesc}} *\=\=(?:[\r\n ]*)\=\= 
*{{int:filedesc}} *\=\=',
             r'\1== {{int:filedesc}} ==', exceptions, True)
-        text = pywikibot.replaceExcept(
+        text = textlib.replaceExcept(
             text,
             r'([\r\n]|^)\=\= *{{int:license-header}} *\=\=(?:[\r\n ]*)\=\= 
*{{int:license-header}} *\=\=',
             r'\1== {{int:license-header}} ==', exceptions, True)
diff --git a/scripts/featured.py b/scripts/featured.py
index 9e25425..860190c 100644
--- a/scripts/featured.py
+++ b/scripts/featured.py
@@ -55,7 +55,7 @@
 # (C) Maxim Razin, 2005
 # (C) Leonardo Gregianin, 2005-2008
 # (C) xqt, 2009-2014
-# (C) Pywikibot team, 2005-2012
+# (C) Pywikibot team, 2005-2014
 #
 # Distributed under the terms of the MIT license.
 #
@@ -65,7 +65,7 @@
 import pickle
 import re
 import pywikibot
-from pywikibot import i18n
+from pywikibot import i18n, textlib
 from pywikibot.pagegenerators import PreloadingGenerator
 from pywikibot.config2 import LS  # line separator
 
@@ -566,12 +566,12 @@
                                 text[m1.end():])
                     else:
                         # Moving {{Link FA|xx}} to top of interwikis
-                        iw = pywikibot.getLanguageLinks(text, tosite)
-                        text = pywikibot.removeLanguageLinks(text, tosite)
+                        iw = textlib.getLanguageLinks(text, tosite)
+                        text = textlib.removeLanguageLinks(text, tosite)
                         text += u"%s{{%s|%s}}%s" % (LS, add_tl[0],
                                                     fromsite.code, LS)
-                        text = pywikibot.replaceLanguageLinks(text,
-                                                              iw, tosite)
+                        text = textlib.replaceLanguageLinks(text,
+                                                            iw, tosite)
                     changed = True
         if remove_tl:
             if m2:
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 069a0b9..98bad39 100644
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -25,7 +25,7 @@
 """
 #
 # (C) Multichill, 2009
-# (C) Pywikibot team, 2009-2013
+# (C) Pywikibot team, 2009-2014
 #
 # Distributed under the terms of the MIT license.
 #
@@ -39,7 +39,7 @@
 import base64
 import time
 import pywikibot
-from pywikibot import config
+from pywikibot import config, textlib
 import upload
 
 try:
@@ -236,9 +236,9 @@
     """
     description = u'== {{int:filedesc}} ==\n%s' % flinfoDescription
     if removeCategories:
-        description = pywikibot.removeCategoryLinks(description,
-                                                    pywikibot.Site(
-                                                        'commons', 'commons'))
+        description = textlib.removeCategoryLinks(description,
+                                                  pywikibot.Site(
+                                                      'commons', 'commons'))
     if override:
         description = description.replace(u'{{cc-by-sa-2.0}}\n', u'')
         description = description.replace(u'{{cc-by-2.0}}\n', u'')
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index 1f09720..6224e24 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -32,7 +32,7 @@
 
 import re
 import pywikibot
-from pywikibot import pagegenerators as pg, WikidataBot
+from pywikibot import pagegenerators as pg, textlib, WikidataBot
 
 docuReplacements = {'&params;': pywikibot.pagegenerators.parameterHelp}
 
@@ -125,7 +125,7 @@
             pywikibot.output(u'%s item %s has claims for all properties. 
Skipping' % (page, item.title()))
         else:
             pagetext = page.get()
-            templates = pywikibot.extract_templates_and_params(pagetext)
+            templates = textlib.extract_templates_and_params(pagetext)
             for (template, fielddict) in templates:
                 # Clean up template
                 try:
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index 88c2318..f000a59 100644
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -40,7 +40,7 @@
 import xml.etree.ElementTree
 
 import pywikibot
-from pywikibot import pagegenerators
+from pywikibot import pagegenerators, textlib
 
 category_blacklist = []
 countries = []
@@ -390,7 +390,7 @@
 
 def saveImagePage(imagepage, newcats, usage, galleries, onlyFilter):
     """ Remove the old categories and add the new categories to the image. """
-    newtext = pywikibot.removeCategoryLinks(imagepage.text, imagepage.site)
+    newtext = textlib.removeCategoryLinks(imagepage.text, imagepage.site)
     if not onlyFilter:
         newtext = removeTemplates(newtext)
         newtext = newtext + getCheckCategoriesTemplate(usage, galleries,
diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index 2706554..dba7b0c 100644
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -36,8 +36,7 @@
 import sys
 import pywikibot
 import upload
-from pywikibot import config
-from pywikibot import i18n
+from pywikibot import config, i18n, textlib
 
 copy_message = {
     'ar': u"هذه الصورة تم نقلها من %s. الوصف الأصلي كان:\r\n\r\n%s",
@@ -178,9 +177,9 @@
                                                   
self.targetSite.sitename())].iteritems():
                     new = '{{%s}}' % new
                     old = re.compile('{{%s}}' % old)
-                    description = pywikibot.replaceExcept(description, old, 
new,
-                                                          ['comment', 'math',
-                                                           'nowiki', 'pre'])
+                    description = textlib.replaceExcept(description, old, new,
+                                                        ['comment', 'math',
+                                                         'nowiki', 'pre'])
 
             description = i18n.translate(self.targetSite, copy_message,
                                          fallback=True) % (sourceSite, 
description)
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 6760a6c..8739377 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -337,7 +337,7 @@
 # (C) Daniel Herding, 2004
 # (C) Yuri Astrakhan, 2005-2006
 # (C) xqt, 2009-2014
-# (C) Pywikibot team, 2007-2013
+# (C) Pywikibot team, 2007-2014
 #
 # Distributed under the terms of the MIT license.
 #
@@ -354,11 +354,7 @@
 import socket
 import webbrowser
 import pywikibot
-from pywikibot import config
-from pywikibot import pagegenerators
-from pywikibot import i18n
-from pywikibot import interwiki_graph
-from pywikibot import titletranslate
+from pywikibot import config, i18n, pagegenerators, textlib, interwiki_graph, titletranslate
 
 docuReplacements = {
     '&pagegenerators_help;': pagegenerators.parameterHelp
@@ -1919,9 +1915,9 @@
         pywikibot.output(u"Changes to be made: %s" % mods)
         oldtext = page.get()
         template = (page.namespace() == 10)
-        newtext = pywikibot.replaceLanguageLinks(oldtext, new,
-                                                 site=page.site,
-                                                 template=template)
+        newtext = textlib.replaceLanguageLinks(oldtext, new,
+                                               site=page.site,
+                                               template=template)
         # This is for now. Later there should be different funktions for each
         # kind
         if not botMayEdit(page):
diff --git a/scripts/makecat.py b/scripts/makecat.py
index f005154..4eed250 100644
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -43,7 +43,7 @@
 import sys
 import codecs
 import pywikibot
-from pywikibot import date, pagegenerators, i18n
+from pywikibot import date, pagegenerators, i18n, textlib
 
 
 def isdate(s):
@@ -89,7 +89,7 @@
                             pl.change_category(actualworkingcat)
                             break
                 else:
-                    pl.put(pywikibot.replaceCategoryLinks(
+                    pl.put(textlib.replaceCategoryLinks(
                         text, cats + [actualworkingcat], site=pl.site))
     if cl:
         if checkforward:
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 718a47d..be337ec 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -42,7 +42,7 @@
 
 import re
 import pywikibot
-from pywikibot import i18n, pagegenerators, Bot
+from pywikibot import i18n, pagegenerators, textlib, Bot
 
 # This is required for the text that is shown when you run this script
 # with the parameter -help.
@@ -443,7 +443,7 @@
         import xmlreader
         dump = xmlreader.XmlDump(self.xmlFilename)
         for entry in dump.parse():
-            text = pywikibot.removeDisabledParts(entry.text)
+            text = textlib.removeDisabledParts(entry.text)
             if self.refR.search(text) and not self.referencesR.search(text):
                 yield pywikibot.Page(pywikibot.Site(), entry.title)
 
@@ -479,7 +479,7 @@
         """
         Checks whether or not the page is lacking a references tag.
         """
-        oldTextCleaned = pywikibot.removeDisabledParts(text)
+        oldTextCleaned = textlib.removeDisabledParts(text)
         if self.referencesR.search(oldTextCleaned) or \
            self.referencesTagR.search(oldTextCleaned):
             if self.getOption('verbose'):
@@ -516,7 +516,7 @@
             while index < len(oldText):
                 match = sectionR.search(oldText, index)
                 if match:
-                    if pywikibot.isDisabled(oldText, match.start()):
+                    if textlib.isDisabled(oldText, match.start()):
                         pywikibot.output(
                             'Existing  %s section is commented out, skipping.'
                             % section)
@@ -543,7 +543,7 @@
             while index < len(oldText):
                 match = sectionR.search(oldText, index)
                 if match:
-                    if pywikibot.isDisabled(oldText, match.start()):
+                    if textlib.isDisabled(oldText, match.start()):
                         pywikibot.output(
                             'Existing %s section is commented out, won\'t add '
                             'the references in front of it.' % section)
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 93617bf..7293945 100644
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -54,7 +54,7 @@
 import StringIO
 
 import pywikibot
-from pywikibot import i18n, pagegenerators, xmlreader, Bot
+from pywikibot import i18n, pagegenerators, textlib, xmlreader, Bot
 import noreferences
 
 docuReplacements = {
@@ -512,7 +512,7 @@
 
             # for each link to change
             for match in linksInRef.finditer(
-                    pywikibot.removeDisabledParts(page.get())):
+                    textlib.removeDisabledParts(page.get())):
 
                 link = match.group(u'url')
                 # debugging purpose
diff --git a/scripts/replace.py b/scripts/replace.py
index 4f1dc75..c88dcdc 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -128,7 +128,7 @@
 import pywikibot
 from pywikibot import pagegenerators
 from pywikibot import editor as editarticle
-from pywikibot import i18n
+from pywikibot import i18n, textlib
 import webbrowser
 
 # Imports predefined replacements tasks from fixes.py
@@ -186,7 +186,7 @@
                         and not self.isTextExcepted(entry.text):
                     new_text = entry.text
                     for old, new in self.replacements:
-                        new_text = pywikibot.replaceExcept(
+                        new_text = textlib.replaceExcept(
                             new_text, old, new, self.excsInside, self.site)
                     if new_text != entry.text:
                         yield pywikibot.Page(self.site, entry.title)
@@ -257,7 +257,7 @@
                 regular expressions.
             inside-tags
                 A list of strings. These strings must be keys from the
-                exceptionRegexes dictionary in pywikibot.replaceExcept().
+                exceptionRegexes dictionary in textlib.replaceExcept().
 
         """
         self.generator = generator
@@ -315,9 +315,9 @@
         for old, new in self.replacements:
             if self.sleep is not None:
                 time.sleep(self.sleep)
-            new_text = pywikibot.replaceExcept(new_text, old, new, exceptions,
-                                               allowoverlap=self.allowoverlap,
-                                               site=self.site)
+            new_text = textlib.replaceExcept(new_text, old, new, exceptions,
+                                             allowoverlap=self.allowoverlap,
+                                             site=self.site)
         return new_text
 
     def run(self):
@@ -363,9 +363,9 @@
                     cats = page.categories(nofollow_redirects=True)
                     if self.addedCat not in cats:
                         cats.append(self.addedCat)
-                        new_text = pywikibot.replaceCategoryLinks(new_text,
-                                                                  cats,
-                                                                  site=page.site)
+                        new_text = textlib.replaceCategoryLinks(new_text,
+                                                                cats,
+                                                                site=page.site)
                 # Show the title of the page we're working on.
                 # Highlight the title in purple.
                 pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index cf50a5f..8d7b223 100644
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -107,11 +107,7 @@
 import time
 
 import pywikibot
-from pywikibot import i18n
-from pywikibot import config
-from pywikibot import pagegenerators
-from pywikibot import xmlreader
-from pywikibot import weblib
+from pywikibot import i18n, config, pagegenerators, textlib, xmlreader, weblib
 
 docuReplacements = {
     '&params;': pagegenerators.parameterHelp
@@ -144,7 +140,7 @@
 
 
 def weblinksIn(text, withoutBracketed=False, onlyBracketed=False):
-    text = pywikibot.removeDisabledParts(text)
+    text = textlib.removeDisabledParts(text)
 
     # MediaWiki parses templates before parsing external links. Thus, there
     # might be a | or a } directly after a URL which does not belong to
@@ -168,8 +164,8 @@
 
     # Remove HTML comments in URLs as well as URLs in HTML comments.
     # Also remove text inside nowiki links etc.
-    text = pywikibot.removeDisabledParts(text)
-    linkR = pywikibot.compileLinkR(withoutBracketed, onlyBracketed)
+    text = textlib.removeDisabledParts(text)
+    linkR = textlib.compileLinkR(withoutBracketed, onlyBracketed)
     for m in linkR.finditer(text):
         if m.group('url'):
             yield m.group('url')

-- 
To view, visit https://gerrit.wikimedia.org/r/155008
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I0437028a32da219205fc30aec3921474132e1cc8
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ricordisamoa <[email protected]>
Gerrit-Reviewer: John Vandenberg <[email protected]>
Gerrit-Reviewer: Ladsgroup <[email protected]>
Gerrit-Reviewer: Merlijn van Deen <[email protected]>
Gerrit-Reviewer: XZise <[email protected]>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
Pywikibot-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikibot-commits

Reply via email to