jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/551538 )

Change subject: [flake8] Solve flake8-comprehensions 3.1.1 issues
......................................................................

[flake8] Solve flake8-comprehensions 3.1.1 issues

Change-Id: Ic93949a6e84db94a2f651119929304ba221f41ee
---
M pywikibot/data/api.py
M pywikibot/data/wikistats.py
M pywikibot/page.py
M pywikibot/site.py
M pywikibot/userinterfaces/terminal_interface_base.py
M scripts/checkimages.py
M scripts/delete.py
M scripts/makecat.py
M scripts/solve_disambiguation.py
M tests/site_tests.py
M tests/xmlreader_tests.py
11 files changed, 21 insertions(+), 30 deletions(-)

Approvals:
  Framawiki: Looks good to me, approved
  jenkins-bot: Verified

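For context, here is a minimal sketch of the two comprehension patterns this change rewrites. It assumes these correspond to flake8-comprehensions' "unnecessary list comprehension" and "unnecessary list() call within sorted()" checks (C4xx class); the sample data below is invented for illustration and is not taken from the patch.

    # Hypothetical sample data, for illustration only.
    pages = ['b', 'a', 'b', 'c']

    # Pattern 1: [x for x in iterable] just copies the iterable into a list,
    # so it can be spelled list(iterable) directly.
    assert [page for page in pages] == list(pages)

    # Pattern 2: sorted() already returns a new list, so materialising the
    # set into a list first is redundant.
    assert sorted(list(set(pages))) == sorted(set(pages))

The behaviour is unchanged in every case; only the redundant intermediate list is dropped, which also avoids building a throwaway copy of the data. With the plugin installed, a plain flake8 run picks these checks up automatically and reports them as C4xx warnings.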


diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 17f5a05..5e6b1a6 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -2176,7 +2176,7 @@
                 max(login_status, pywikibot.site.LoginStatus.NOT_LOGGED_IN))
             user_key = repr(user_key)

-        request_key = repr(sorted(list(self._encoded_items().items())))
+        request_key = repr(sorted(self._encoded_items().items()))
         return repr(self.site) + user_key + request_key

     def _create_file_name(self):
diff --git a/pywikibot/data/wikistats.py b/pywikibot/data/wikistats.py
index 14175d8..2f2d06f 100644
--- a/pywikibot/data/wikistats.py
+++ b/pywikibot/data/wikistats.py
@@ -153,9 +153,7 @@
             f = BytesIO(data)

         reader = csv.DictReader(f)
-
-        data = [site for site in reader]
-
+        data = list(reader)
         self._data['csv'][table] = data

         return data
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 001e3db..1a1024a 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3207,12 +3207,12 @@
     @deprecated('list(Category.subcategories(...))', since='20090307')
     def subcategoriesList(self, recurse=False):
         """DEPRECATED: Equivalent to list(self.subcategories(...))."""
-        return sorted(list(set(self.subcategories(recurse))))
+        return sorted(set(self.subcategories(recurse)))

     @deprecated('list(Category.articles(...))', since='20090307')
     def articlesList(self, recurse=False):
         """DEPRECATED: equivalent to list(self.articles(...))."""
-        return sorted(list(set(self.articles(recurse))))
+        return sorted(set(self.articles(recurse)))

     @deprecated('Category.categories()', since='20090307')
     def supercategories(self):
@@ -3222,7 +3222,7 @@
     @deprecated('list(Category.categories(...))', since='20090307')
     def supercategoriesList(self):
         """DEPRECATED: equivalent to list(self.categories(...))."""
-        return sorted(list(set(self.categories())))
+        return sorted(set(self.categories()))


 class User(Page):
diff --git a/pywikibot/site.py b/pywikibot/site.py
index c9392b0..ffa61ff 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -6208,8 +6208,7 @@
         @return: True if API returned expected response; False otherwise
         @rtype: bool
         """
-        req = self._simple_request(action='purge',
-                                   titles=[page for page in set(pages)])
+        req = self._simple_request(action='purge', titles=list(set(pages)))
         if converttitles:
             req['converttitles'] = True
         if redirects:
diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py
index 98e0b5c..514a87b 100755
--- a/pywikibot/userinterfaces/terminal_interface_base.py
+++ b/pywikibot/userinterfaces/terminal_interface_base.py
@@ -465,7 +465,7 @@
             return [s.decode(self.encoding) for s in self.argv]
         # in python 3, self.argv is unicode and thus cannot be decoded
         except AttributeError:
-            return [s for s in self.argv]
+            return list(self.argv)


 class TerminalHandler(logging.Handler):
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 8391a31..e63139a 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -744,7 +744,7 @@
         max_usage = 0  # hold max amount of using pages
         for num, element in enumerate(listGiven):
             image = element[1]
-            image_used = len([page for page in image.usingPages()])
+            image_used = len(list(image.usingPages()))
             if image_used > max_usage:
                 max_usage = image_used
                 inx_found = num
diff --git a/scripts/delete.py b/scripts/delete.py
index b291e5a..effe936 100755
--- a/scripts/delete.py
+++ b/scripts/delete.py
@@ -206,7 +206,7 @@
                 namespaces = self.getOption('orphansonly')
                 ns_with_ref = self.current_page.namespaces_with_ref_to_page(
                     namespaces)
-                ns_with_ref = sorted(list(ns_with_ref))
+                ns_with_ref = sorted(ns_with_ref)
                 if ns_with_ref:
                     ns_names = ', '.join(str(ns.id) for ns in ns_with_ref)
                     pywikibot.output(
diff --git a/scripts/makecat.py b/scripts/makecat.py
index 65d60a4..899951e 100755
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -111,9 +111,8 @@
             except pywikibot.IsRedirectPage:
                 cl = True
             else:
-                cats = [x for x in pl.categories()]
+                cats = list(pl.categories())
                 if workingcat not in cats:
-                    cats = [x for x in pl.categories()]
                     for c in cats:
                         if c in parentcats:
                             if self.removeparent:
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index a20f64f..3e6d5fe 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -410,12 +410,9 @@
     def __iter__(self):
         """Yield pages."""
         # TODO: start yielding before all referring pages have been found
-        refs = [
-            page for page in self.disambPage.getReferences(
-                with_template_inclusion=False,
-                namespaces=0 if self.main_only else None
-            )
-        ]
+        refs = list(self.disambPage.getReferences(
+            with_template_inclusion=False,
+            namespaces=0 if self.main_only else None))
         pywikibot.output('Found {0} references.'.format(len(refs)))
         # Remove ignorables
         if self.disambPage.site.family.name in ignore_title and \
diff --git a/tests/site_tests.py b/tests/site_tests.py
index a1f7c36..3aa3e12 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -2435,22 +2435,22 @@

         gen = mysite.pagecategories(mypage, total=12)
         gen.set_query_increment = 5
-        cats = [c for c in gen]
+        cats = list(gen)
         self.assertLength(cats, 12)

         gen = mysite.categorymembers(mycat, total=12)
         gen.set_query_increment = 5
-        cat_members = [cm for cm in gen]
+        cat_members = list(gen)
         self.assertLength(cat_members, 12)

         gen = mysite.pageimages(mypage, total=5)
         gen.set_query_increment = 3
-        images = [im for im in gen]
+        images = list(gen)
         self.assertLength(images, 5)

         gen = mysite.pagetemplates(mypage, total=5)
         gen.set_query_increment = 3
-        templates = [tl for tl in gen]
+        templates = list(gen)
         self.assertLength(templates, 5)

         mysite.loadrevisions(mypage, step=5, total=12)
diff --git a/tests/xmlreader_tests.py b/tests/xmlreader_tests.py
index 190c810..227d5cd 100644
--- a/tests/xmlreader_tests.py
+++ b/tests/xmlreader_tests.py
@@ -21,9 +21,8 @@

     def _get_entries(self, filename, **kwargs):
         """Get all entries via XmlDump."""
-        entries = [r for r in
-                   xmlreader.XmlDump(join_xml_data_path(filename),
-                                     **kwargs).parse()]
+        entries = list(xmlreader.XmlDump(join_xml_data_path(filename),
+                                         **kwargs).parse())
         return entries


@@ -55,9 +54,8 @@
     def test_XmlDumpRedirect(self):
         """Test XmlDump correctly parsing whether a page is a redirect."""
         self._get_entries('article-pyrus.xml', allrevisions=True)
-        pages = [r for r in
-                 xmlreader.XmlDump(
-                     join_xml_data_path('article-pyrus.xml')).parse()]
+        pages = list(xmlreader.XmlDump(
+            join_xml_data_path('article-pyrus.xml')).parse())
         self.assertTrue(pages[0].isredirect)

     def _compare(self, previous, variant, all_revisions):

--
To view, visit https://gerrit.wikimedia.org/r/551538
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Ic93949a6e84db94a2f651119929304ba221f41ee
Gerrit-Change-Number: 551538
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <[email protected]>
Gerrit-Reviewer: Framawiki <[email protected]>
Gerrit-Reviewer: Zoranzoki21 <[email protected]>
Gerrit-Reviewer: jenkins-bot (75)
_______________________________________________
Pywikibot-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikibot-commits