jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/433387 )

Change subject: [IMPR] use context manager for closing a file
......................................................................

[IMPR] use context manager for closing a file

Change-Id: I7e73bb73fbf0a0b8355c26113fc31c756385eb7c
---
M pywikibot/login.py
M pywikibot/pagegenerators.py
M scripts/category.py
M scripts/imagecopy.py
M scripts/interwiki.py
M scripts/makecat.py
M scripts/solve_disambiguation.py
M scripts/weblinkchecker.py
8 files changed, 68 insertions(+), 78 deletions(-)
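
The change replaces manual open()/write()/close() sequences with the with
statement, so each file is closed even when an exception is raised before
close() would have run. A minimal sketch of the before/after pattern (the
filename and text below are illustrative only, not taken from the patch):

    import codecs

    # Before: if write() raises, close() is never reached and the handle leaks.
    f = codecs.open('notes.txt', 'a', 'utf-8')  # illustrative filename
    f.write('some text\n')
    f.close()

    # After: the context manager closes the file on both the success and the
    # error path.
    with codecs.open('notes.txt', 'a', 'utf-8') as f:
        f.write('some text\n')

The same idea applies to the generator in pagegenerators.py: yielding inside
the with block keeps the file open only for the generator's lifetime, instead
of relying on a trailing close() that is skipped if the caller stops
iterating early.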

Approvals:
  Zhuyifei1999: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/pywikibot/login.py b/pywikibot/login.py
index 0a0d4c5..88e52bc 100644
--- a/pywikibot/login.py
+++ b/pywikibot/login.py
@@ -207,9 +207,8 @@
         filename = config.datafilepath('pywikibot.lwp')
         pywikibot.debug(u"Storing cookies to %s" % filename,
                         _logger)
-        f = open(filename, 'w')
-        f.write(data)
-        f.close()
+        with open(filename, 'w') as f:
+            f.write(data)

     def readPassword(self):
         """
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 8208fad..5601770 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1463,24 +1463,24 @@
         filename = pywikibot.input(u'Please enter the filename:')
     if site is None:
         site = pywikibot.Site()
-    f = codecs.open(filename, 'r', config.textfile_encoding)
-    linkmatch = None
-    for linkmatch in pywikibot.link_regex.finditer(f.read()):
-        # If the link is in interwiki format, the Page object may reside
-        # on a different Site than the default.
-        # This makes it possible to work on different wikis using a single
-        # text file, but also could be dangerous because you might
-        # inadvertently change pages on another wiki!
-        yield pywikibot.Page(pywikibot.Link(linkmatch.group("title"), site))
-    if linkmatch is None:
-        f.seek(0)
-        for title in f:
-            title = title.strip()
-            if '|' in title:
-                title = title[:title.index('|')]
-            if title:
-                yield pywikibot.Page(site, title)
-    f.close()
+    with codecs.open(filename, 'r', config.textfile_encoding) as f:
+        linkmatch = None
+        for linkmatch in pywikibot.link_regex.finditer(f.read()):
+            # If the link is in interwiki format, the Page object may reside
+            # on a different Site than the default.
+            # This makes it possible to work on different wikis using a single
+            # text file, but also could be dangerous because you might
+            # inadvertently change pages on another wiki!
+            yield pywikibot.Page(pywikibot.Link(linkmatch.group('title'),
+                                                site))
+        if linkmatch is None:
+            f.seek(0)
+            for title in f:
+                title = title.strip()
+                if '|' in title:
+                    title = title[:title.index('|')]
+                if title:
+                    yield pywikibot.Page(site, title)


 def PagesFromTitlesGenerator(iterable, site=None):
diff --git a/scripts/category.py b/scripts/category.py
index 7fec876..af2083e 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -1125,9 +1125,8 @@
         pywikibot.output(u'')
         if self.filename:
             pywikibot.output(u'Saving results in %s' % self.filename)
-            f = codecs.open(self.filename, 'a', 'utf-8')
-            f.write(tree)
-            f.close()
+            with codecs.open(self.filename, 'a', 'utf-8') as f:
+                f.write(tree)
         else:
             pywikibot.stdout(tree)

diff --git a/scripts/imagecopy.py b/scripts/imagecopy.py
index b1a1eb0..6fd6fe4 100644
--- a/scripts/imagecopy.py
+++ b/scripts/imagecopy.py
@@ -75,17 +75,17 @@
 import threading
 import webbrowser

-import pywikibot
+from os import path

 from requests.exceptions import RequestException

-from pywikibot import pagegenerators, config, i18n
+import pywikibot

+from pywikibot import config, i18n, pagegenerators
 from pywikibot.comms.http import fetch
-
 from pywikibot.specialbots import UploadRobot

-from scripts import image
+from scripts.image import ImageRobot

 try:
     from pywikibot.userinterfaces.gui import Tkdialog, Tkinter
@@ -333,7 +333,7 @@
             # If the image is uploaded under a different name, replace all
             # instances
             if self.imagePage.title(withNamespace=False) != self.newname:
-                imagebot = image.ImageRobot(
+                imagebot = ImageRobot(
                     generator=self.preloadingGen,
                     oldImage=self.imagePage.title(withNamespace=False),
                     newImage=self.newname,
@@ -374,19 +374,15 @@
 # -label ok skip view
 # textarea
 archivo = config.datafilepath("Uploadbot.localskips.txt")
-try:
-    open(archivo, 'r')
-except IOError:
-    tocreate = open(archivo, 'w')
-    tocreate.write("{{NowCommons")
-    tocreate.close()
+if not path.exists(archivo):
+    with open(archivo, 'w') as tocreate:
+        tocreate.write('{{NowCommons')


 def getautoskip():
     """Get a list of templates to skip."""
-    f = codecs.open(archivo, 'r', 'utf-8')
-    txt = f.read()
-    f.close()
+    with codecs.open(archivo, 'r', 'utf-8') as f:
+        txt = f.read()
     toreturn = txt.split('{{')[1:]
     return toreturn

diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 82d4f94..7521e06 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1160,11 +1160,10 @@
         self.conf.note('{} does not have any interwiki links'
                        .format(self.originPage))
         if config.without_interwiki:
-            f = codecs.open(
-                pywikibot.config.datafilepath('without_interwiki.txt'),
-                'a', 'utf-8')
-            f.write(u"# %s \n" % page)
-            f.close()
+            with codecs.open(
+                    pywikibot.config.datafilepath('without_interwiki.txt'),
+                    'a', 'utf-8') as f:
+                f.write('# {} \n'.format(page))

     def askForHints(self, counter):
         """Ask for hints to other sites."""
@@ -1369,21 +1368,21 @@
                                  % (self.originPage, duplicate, page))
                 self.makeForcedStop(counter)
                 try:
-                    f = codecs.open(
+                    with codecs.open(
                         pywikibot.config.datafilepath(
                             'autonomous_problems.dat'),
-                        'a', 'utf-8')
-                    f.write(u"* %s {Found more than one link for %s}"
-                            % (self.originPage, page.site))
-                    if config.interwiki_graph and config.interwiki_graph_url:
-                        filename = interwiki_graph.getFilename(
-                            self.originPage,
-                            extension=config.interwiki_graph_formats[0])
-                        f.write(
-                            ' [%s%s graph]'
-                            % (config.interwiki_graph_url, filename))
-                    f.write("\n")
-                    f.close()
+                            'a', 'utf-8') as f:
+                        f.write('* %s {Found more than one link for %s}'
+                                % (self.originPage, page.site))
+                        if config.interwiki_graph \
+                           and config.interwiki_graph_url:
+                            filename = interwiki_graph.getFilename(
+                                self.originPage,
+                                extension=config.interwiki_graph_formats[0])
+                            f.write(' [{}{} graph]'
+                                    .format(config.interwiki_graph_url,
+                                            filename))
+                        f.write('\n')
                 # FIXME: What errors are we catching here?
                 except Exception:
                     # raise
diff --git a/scripts/makecat.py b/scripts/makecat.py
index 4305fdd..4f2e867 100755
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -37,7 +37,7 @@
 L(ist) - show current list of pages to include or to check
 """
 # (C) Andre Engels, 2004
-# (C) Pywikibot team, 2005-2017
+# (C) Pywikibot team, 2005-2018
 #
 # Distributed under the terms of the MIT license.
 #
@@ -225,17 +225,15 @@
         workingcatname.encode('ascii', 'xmlcharrefreplace').decode('ascii') +
         '_exclude.txt')
     try:
-        f = codecs.open(filename, 'r', encoding=mysite.encoding())
-        for line in f.readlines():
-            # remove trailing newlines and carriage returns
-            try:
-                while line[-1] in ['\n', '\r']:
-                    line = line[:-1]
-            except IndexError:
-                pass
-            pl = pywikibot.Page(mysite, line)
-            checked[pl] = pl
-        f.close()
+        with codecs.open(filename, 'r', encoding=mysite.encoding()) as f:
+            for line in f.readlines():
+                # remove leading and trailing spaces, LF and CR
+                line = line.strip()
+                if not line:
+                    continue
+                pl = pywikibot.Page(mysite, line)
+                checked[pl] = pl
+
         excludefile = codecs.open(filename, 'a', encoding=mysite.encoding())
     except IOError:
         # File does not exist
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 769293a..50e5001 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -502,9 +502,8 @@
                 self.disambPage.title(asUrl=True) + '.txt')
             try:
                 # Open file for appending. If none exists, create a new one.
-                f = codecs.open(filename, 'a', 'utf-8')
-                f.write(refPage.title(asUrl=True) + '\n')
-                f.close()
+                with codecs.open(filename, 'a', 'utf-8') as f:
+                    f.write(refPage.title(asUrl=True) + '\n')
             except IOError:
                 pass

diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index 161692e..e0c4ee8 100755
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -686,13 +686,13 @@
                                                     'results-%s-%s.txt'
                                                     % (self.site.family.name,
                                                        self.site.lang))
-        txtfile = codecs.open(txtfilename, 'a', 'utf-8')
-        self.logCount += 1
-        if self.logCount % 30 == 0:
-            # insert a caption
-            txtfile.write('=== %s ===\n' % containingPage.title()[:3])
-        txtfile.write(errorReport)
-        txtfile.close()
+        with codecs.open(txtfilename, 'a', 'utf-8') as txtfile:
+            self.logCount += 1
+            if self.logCount % 30 == 0:
+                # insert a caption
+                txtfile.write('=== {} ===\n'
+                              .format(containingPage.title()[:3]))
+            txtfile.write(errorReport)

         if self.reportThread and not containingPage.isTalkPage():
             self.reportThread.report(url, errorReport, containingPage,

--
To view, visit https://gerrit.wikimedia.org/r/433387
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I7e73bb73fbf0a0b8355c26113fc31c756385eb7c
Gerrit-Change-Number: 433387
Gerrit-PatchSet: 8
Gerrit-Owner: Xqt <i...@gno.de>
Gerrit-Reviewer: Dalba <dalba.w...@gmail.com>
Gerrit-Reviewer: Dvorapa <dvor...@seznam.cz>
Gerrit-Reviewer: John Vandenberg <jay...@gmail.com>
Gerrit-Reviewer: Xqt <i...@gno.de>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1...@gmail.com>
Gerrit-Reviewer: Zoranzoki21 <zorandori4...@gmail.com>
Gerrit-Reviewer: jenkins-bot <>