Mpaa has uploaded a new change for review.

  https://gerrit.wikimedia.org/r/104211


Change subject: Bug 58944 - site.mediawiki_message: support multiple messages
......................................................................

Bug 58944 - site.mediawiki_message: support multiple messages

site.mediawiki_message() has been expanded to handle requests for several
messages.

https://bugzilla.wikimedia.org/show_bug.cgi?id=58944

Change-Id: I64710f67c04a0d2b93c70d9edd472f8bab4ce309
---
M pywikibot/site.py
M tests/site_tests.py
2 files changed, 54 insertions(+), 122 deletions(-)


  git pull ssh://gerrit.wikimedia.org:29418/pywikibot/core 
refs/changes/11/104211/1

diff --git a/pywikibot/site.py b/pywikibot/site.py
index a090d04..85614dd 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -34,9 +34,6 @@
 from pywikibot.data import api
 from pywikibot.exceptions import *
 
-if sys.version_info[0] > 2:
-    basestring = (str,)
-
 _logger = "wiki.site"
 
 
@@ -64,7 +61,7 @@
 
     @classmethod
     def name(cls, search_value):
-        for key, value in cls.__dict__.items():
+        for key, value in cls.__dict__.iteritems():
             if key == key.upper() and value == search_value:
                 return key
         raise KeyError("Value %r could not be found in this enum"
@@ -138,7 +135,7 @@
                 raise NoSuchSite("Language %s in family %s is obsolete"
                                  % (self.__code, self.__family.name))
         if self.__code not in self.languages():
-            if self.__family.name in list(self.__family.langs.keys()) and \
+            if self.__family.name in self.__family.langs.keys() and \
                len(self.__family.langs) == 1:
                 oldcode = self.__code
                 self.__code = self.__family.name
@@ -261,12 +258,12 @@
     def languages(self):
         """Return list of all valid language codes for this site's Family."""
 
-        return list(self.family.langs.keys())
+        return self.family.langs.keys()
 
     def validLanguageLinks(self):
         """Return list of language codes that can be used in interwiki 
links."""
 
-        nsnames = [name for name in self.namespaces().values()]
+        nsnames = [name for name in self.namespaces().itervalues()]
         return [lang for lang in self.languages()
                 if lang[:1].upper() + lang[1:] not in nsnames]
 
@@ -997,20 +994,29 @@
             self.login(sysop)
         return 'hasmsg' in self._userinfo
 
-    def mediawiki_message(self, key):
-        """Return the MediaWiki message text for key "key" """
-        if not key in self._msgcache:
+    def mediawiki_message(self, *key):
+        """Return the MediaWiki message text for each key in "key".
+           A single message (string) is returned if only one key is requested;
+           a dict of messages is returned otherwise."""
+        if not all(_key in self._msgcache for _key in key):
             msg_query = api.QueryGenerator(site=self, meta="allmessages",
-                                           ammessages=key)
-            for msg in msg_query:
-                if msg['name'] == key and not 'missing' in msg:
-                    self._msgcache[key] = msg['*']
-                    break
-            else:
-                raise KeyError("Site %(self)s has no message '%(key)s'"
+                                           ammessages='|'.join(key))
+            for _key in key:
+                for msg in msg_query:
+                    if msg['name'] == _key and not 'missing' in msg:
+                        self._msgcache[_key] = msg['*']
+                        break
+                else:
+                    raise KeyError("Site %(self)s has no message '%(_key)s'"
                                % locals())
-        return self._msgcache[key]
-
+                               
+        if len(key) == 1:
+            #only one message is requested
+            return self._msgcache[key[0]]
+        else:
+            #several messages are requested
+            return dict((_key, self._msgcache[_key]) for _key in key)
+        
     def has_mediawiki_message(self, key):
         """Return True iff this site defines a MediaWiki message for 'key'."""
         try:
@@ -1381,7 +1387,6 @@
     def page_isredirect(self, page):
         """Return True if and only if page is a redirect."""
         if not hasattr(page, "_isredir"):
-            page._isredir = False  # bug 54684
             self.loadpageinfo(page)
         return page._isredir
 
@@ -1411,7 +1416,7 @@
         if target_title == title or "pages" not in result['query']:
             # no "pages" element indicates a circular redirect
             raise pywikibot.CircularRedirect(redirmap[title])
-        pagedata = list(result['query']['pages'].values())[0]
+        pagedata = result['query']['pages'].values()[0]
             # there should be only one value in 'pages', and it is the target
         if self.sametitle(pagedata['title'], target_title):
             target = pywikibot.Page(self, pagedata['title'], pagedata['ns'])
@@ -1454,7 +1459,7 @@
                 # only use pageids if all pages have them
                 rvgen.request["pageids"] = "|".join(pageids)
             else:
-                rvgen.request["titles"] = "|".join(list(cache.keys()))
+                rvgen.request["titles"] = "|".join(cache.keys())
             rvgen.request[u"rvprop"] = 
u"ids|flags|timestamp|user|comment|content"
             pywikibot.output(u"Retrieving %s pages from %s."
                              % (len(cache), self))
@@ -1481,7 +1486,7 @@
                 except KeyError:
                     pywikibot.debug(u"No 'title' in %s" % pagedata, _logger)
                     pywikibot.debug(u"pageids=%s" % pageids, _logger)
-                    pywikibot.debug(u"titles=%s" % list(cache.keys()), _logger)
+                    pywikibot.debug(u"titles=%s" % cache.keys(), _logger)
                     continue
                 page = cache[pagedata['title']]
                 api.update_page(page, pagedata)
@@ -1563,7 +1568,7 @@
                         namespaces=namespaces,
                         content=content
                     )
-            return itertools.chain(*list(genlist.values()))
+            return itertools.chain(*genlist.values())
         return blgen
 
     def page_embeddedin(self, page, filterRedirects=None, namespaces=None,
@@ -2433,7 +2438,7 @@
         if where not in ("text", "titles"):
             raise Error("search: unrecognized 'where' value: %s" % where)
         if namespaces == []:
-            namespaces = [ns for ns in list(self.namespaces().keys()) if ns >= 
0]
+            namespaces = [ns for ns in self.namespaces().keys() if ns >= 0]
         if not namespaces:
             pywikibot.warning(u"search: namespaces cannot be empty; using 
[0].")
             namespaces = [0]
@@ -2692,6 +2697,7 @@
 
         @param page: The Page to be saved; its .text property will be used
             as the new text to be saved to the wiki
+        @param token: the edit token retrieved using Site.token()
         @param summary: the edit summary (required!)
         @param minor: if True (default), mark edit as minor
         @param notminor: if True, override account preferences to mark edit
@@ -2763,7 +2769,7 @@
                 result = req.submit()
                 pywikibot.debug(u"editpage response: %s" % result,
                                 _logger)
-            except api.APIError as err:
+            except api.APIError, err:
                 self.unlock_page(page)
                 if err.code.endswith("anon") and self.logged_in():
                     pywikibot.debug(
@@ -2907,7 +2913,7 @@
             result = req.submit()
             pywikibot.debug(u"movepage response: %s" % result,
                             _logger)
-        except api.APIError as err:
+        except api.APIError, err:
             if err.code.endswith("anon") and self.logged_in():
                 pywikibot.debug(
                     u"movepage: received '%s' even though bot is logged in"
@@ -2963,7 +2969,7 @@
                 % page.title(asLink=True))
         last_rev = page._revisions[page.latestRevision()]
         last_user = last_rev.user
-        for rev in sorted(list(page._revisions.keys()), reverse=True):
+        for rev in sorted(page._revisions.keys(), reverse=True):
             # start with most recent revision first
             if rev.user != last_user:
                 prev_user = rev.user
@@ -2984,7 +2990,7 @@
                           token=token)
         try:
             result = req.submit()
-        except api.APIError as err:
+        except api.APIError, err:
             errdata = {
                 'site': self,
                 'title': page.title(withSection=False),
@@ -3016,7 +3022,7 @@
         """
         try:
             self.login(sysop=True)
-        except pywikibot.NoUsername as e:
+        except pywikibot.NoUsername, e:
             raise NoUsername("delete: Unable to login as sysop (%s)"
                              % e.__class__.__name__)
         if not self.logged_in(sysop=True):
@@ -3028,7 +3034,7 @@
                           reason=summary)
         try:
             result = req.submit()
-        except api.APIError as err:
+        except api.APIError, err:
             errdata = {
                 'site': self,
                 'title': page.title(withSection=False),
@@ -3037,60 +3043,6 @@
             if err.code in self._dl_errors:
                 raise Error(self._dl_errors[err.code] % errdata)
             pywikibot.debug(u"delete: Unexpected error code '%s' received."
-                            % err.code,
-                            _logger)
-            raise
-        finally:
-            self.unlock_page(page)
-
-    _protect_errors = {
-        "noapiwrite": "API editing not enabled on %(site)s wiki",
-        "writeapidenied": "User %(user)s not allowed to edit through the API",
-        "permissiondenied": "User %(user)s not authorized to protect pages on 
%(site)s wiki.",
-        "cantedit": "User %(user) can't protect this page because user %(user) 
can't edit it.",
-        "protect-invalidlevel": "Invalid protection level"
-    }
-
-    def protect(self, page, edit, move, summary):
-        """(Un)protect a wiki page. Requires administrator status.
-
-        Valid protection levels (in MediaWiki 1.12) are '' (equivalent to
-        'none'), 'autoconfirmed', and 'sysop'.
-
-        @param edit: Level of edit protection
-        @param move: Level of move protection
-        @param unprotect: If true, unprotect the page (equivalent to setting
-            all protection levels to '')
-        @param reason: Edit summary.
-        @param prompt: If true, ask user for confirmation.
-
-        """
-        try:
-            self.login(sysop=True)
-        except pywikibot.NoUsername as e:
-            raise NoUsername("protect: Unable to login as sysop (%s)"
-                             % e.__class__.__name__)
-        if not self.logged_in(sysop=True):
-            raise NoUsername("protect: Unable to login as sysop")
-        token = self.token(page, "protect")
-        self.lock_page(page)
-        req = api.Request(site=self, action="protect", token=token,
-                          title=page.title(withSection=False),
-                          protections="edit=" + edit + "|" + "move=" + move,
-                          reason=summary)
-        try:
-            result = req.submit()
-        except api.APIError as err:
-            errdata = {
-                'site': self,
-                'title': page.title(withSection=False),
-                'user': self.user(),
-                'level-edit': edit,
-                'level-move': move
-            }
-            if err.code in self._protect_errors:
-                raise Error(self._protect_errors[err.code] % errdata)
-            pywikibot.debug(u"protect: Unexpected error code '%s' received."
                             % err.code,
                             _logger)
             raise
@@ -3260,13 +3212,13 @@
             req["ignorewarnings"] = ""
         try:
             result = req.submit()
-        except api.APIError as err:
+        except api.APIError, err:
             # TODO: catch and process foreseeable errors
             raise
         result = result["upload"]
         pywikibot.debug(result, _logger)
         if "warnings" in result:
-            warning = list(result["warnings"].keys())[0]
+            warning = result["warnings"].keys()[0]
             message = result["warnings"][warning]
             raise pywikibot.UploadWarning(upload_warnings[warning]
                                           % {'msg': message})
@@ -3595,7 +3547,7 @@
         req = api.Request(site=self, **params)
         data = req.submit()
         if not 'success' in data:
-            raise pywikibot.data.api.APIError(data['errors'])
+            raise pywikibot.data.api.APIError, data['errors']
         return data['entities']
 
     def preloaditempages(self, pagelist, groupsize=50):
@@ -3762,12 +3714,8 @@
                 datavalue = {'type': 'wikibase-entityid',
                              'value': sourceclaim._formatDataValue(),
                              }
-            elif sourceclaim.getType() in ['string', 'url']:
+            elif sourceclaim.getType() == 'string':
                 datavalue = {'type': 'string',
-                             'value': sourceclaim._formatDataValue(),
-                             }
-            elif sourceclaim.getType() == 'time':
-                datavalue = {'type': 'time',
                              'value': sourceclaim._formatDataValue(),
                              }
             else:
@@ -3787,6 +3735,7 @@
             # if present, all claims of one source have the same hash
             if not new and hasattr(sourceclaim, 'hash'):
                 params['reference'] = sourceclaim.hash
+
         params['snaks'] = json.dumps(snak)
         for arg in kwargs:
             if arg in ['baserevid', 'summary']:
@@ -3838,29 +3787,6 @@
         if bot:
             params['bot'] = 1
         params['claim'] = '|'.join(claim.snak for claim in claims)
-        params['token'] = self.token(pywikibot.Page(self, u'Main Page'),
-                                     'edit')  # Use a dummy page
-        for kwarg in kwargs:
-            if kwarg in ['baserevid', 'summary']:
-                params[kwarg] = kwargs[kwarg]
-        req = api.Request(site=self, **params)
-        data = req.submit()
-        return data
-
-    @must_be(group='user')
-    def removeSources(self, claim, sources, bot=True, **kwargs):
-        """
-        Removes sources.
-        @param claim A Claim object to remove the sources from
-        @type claim pywikibot.Claim
-        @param sources A list of Claim objects that are sources
-        @type sources pywikibot.Claim
-        """
-        params = dict(action='wbremovereferences')
-        if bot:
-            params['bot'] = 1
-        params['statement'] = claim.snak
-        params['references'] = '|'.join(source.hash for source in sources)
         params['token'] = self.token(pywikibot.Page(self, u'Main Page'),
                                      'edit')  # Use a dummy page
         for kwarg in kwargs:
diff --git a/tests/site_tests.py b/tests/site_tests.py
index ce7b930..d50971e 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -30,7 +30,7 @@
         global mysite, mainpage, imagepage
         mysite = pywikibot.Site(cls.code, cls.family)
         mainpage = pywikibot.Page(pywikibot.Link("Main Page", mysite))
-        imagepage = next(iter(mainpage.imagelinks()))  # 1st image on main page
+        imagepage = iter(mainpage.imagelinks()).next()  # 1st image on main 
page
 
     def testBaseMethods(self):
         """Test cases for BaseSite methods"""
@@ -105,15 +105,15 @@
                             for b in builtins))
         ns = mysite.namespaces()
         self.assertType(ns, dict)
-        self.assertTrue(all(x in ns for x in range(0, 16)))
+        self.assertTrue(all(x in ns for x in xrange(0, 16)))
             # built-in namespaces always present
         self.assertType(mysite.ns_normalize("project"), basestring)
         self.assertTrue(all(isinstance(key, int)
                             for key in ns))
         self.assertTrue(all(isinstance(val, list)
-                            for val in ns.values()))
+                            for val in ns.itervalues()))
         self.assertTrue(all(isinstance(name, basestring)
-                            for val in ns.values()
+                            for val in ns.itervalues()
                             for name in val))
         self.assertTrue(all(isinstance(mysite.namespace(key), basestring)
                             for key in ns))
@@ -150,6 +150,12 @@
             self.assertType(mysite.mediawiki_message(msg), basestring)
         self.assertFalse(mysite.has_mediawiki_message("nosuchmessage"))
         self.assertRaises(KeyError, mysite.mediawiki_message, "nosuchmessage")
+
+        msg = ("1movedto2", "about", "aboutpage")
+        self.assertType(mysite.mediawiki_message(*msg), dict)
+        msg = ("1movedto2", "about", "aboutpage", "nosuchmessage")
+        self.assertRaises(KeyError, mysite.mediawiki_message, *msg)
+        
         self.assertType(mysite.getcurrenttimestamp(), basestring)
         self.assertType(mysite.siteinfo, dict)
         self.assertType(mysite.case(), basestring)
@@ -471,7 +477,7 @@
                 self.assertTrue(prop in block)
         # timestamps should be in descending order
         timestamps = [block['timestamp'] for block in bl]
-        for t in range(1, len(timestamps)):
+        for t in xrange(1, len(timestamps)):
             self.assertTrue(timestamps[t] <= timestamps[t - 1])
 
         b2 = list(mysite.blocks(total=10, reverse=True))
@@ -482,7 +488,7 @@
                 self.assertTrue(prop in block)
         # timestamps should be in ascending order
         timestamps = [block['timestamp'] for block in b2]
-        for t in range(1, len(timestamps)):
+        for t in xrange(1, len(timestamps)):
             self.assertTrue(timestamps[t] >= timestamps[t - 1])
 
         for block in mysite.blocks(starttime="2008-07-01T00:00:01Z", total=5):

-- 
To view, visit https://gerrit.wikimedia.org/r/104211
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: I64710f67c04a0d2b93c70d9edd472f8bab4ce309
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Mpaa <[email protected]>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to