Revision: 7123
Author:   alexsh
Date:     2009-08-06 21:57:43 +0000 (Thu, 06 Aug 2009)

Log Message:
-----------
*Site().mediawiki_message(): add a temporary workaround for when the API query
for messages raises a KeyError (disable the API and fall back to the ordinary method)
*pass the currently available Site() object to query.GetData() in all functions,
instead of creating a new one inside query.py

Modified Paths:
--------------
    trunk/pywikipedia/wikipedia.py

Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py      2009-08-06 21:56:05 UTC (rev 7122)
+++ trunk/pywikipedia/wikipedia.py      2009-08-06 21:57:43 UTC (rev 7123)
@@ -919,7 +919,7 @@
             'prop'      :'info',
             'titles'    :self.title(),
             }
-        data = query.GetData(params, encodeTitle = False)        
+        data = query.GetData(params, self.site(), encodeTitle = False)        
         pageid = data['query']['pages'].keys()[0]
         if data['query']['pages'][pageid].keys()[0] == 'lastrevid':
             return data['query']['pages'][pageid]['lastrevid'] # if ok,
@@ -951,7 +951,7 @@
             'tllimit'   :tllimit,
             }
 
-        data = query.GetData(params, encodeTitle = False)
+        data = query.GetData(params, self.site(), encodeTitle = False)
         try:
             pageid = data['query']['pages'].keys()[0]
         except KeyError:
@@ -1325,7 +1325,7 @@
         #if titles:
         #    predata['titles'] = query.ListToParam(titles)
         
-        text = query.GetData(predata)['query']['pages']
+        text = query.GetData(predata, self.site())['query']['pages']
         
         for pageid in text:
             if text[pageid].has_key('missing'):
@@ -2784,7 +2784,7 @@
             'titles'    :self.title(),
             }
         try:
-            data = query.GetData(params, encodeTitle = False)['query']['pages']
+            data = query.GetData(params, self.site(), encodeTitle = 
False)['query']['pages']
         except KeyError:
             raise NoPage(u'API Error, nothing found in the APIs')
 
@@ -2848,7 +2848,7 @@
             'titles'    :self.title(),
             'iiprop'    :'url',
         }
-        imagedata = query.GetData(params, encodeTitle = False)
+        imagedata = query.GetData(params, self.site(), encodeTitle = False)
         try:
             url=imagedata['query']['pages'].values()[0]['imageinfo'][0]['url']
 #        urlR = re.compile(r'<div class="fullImageLink" id="file">.*?<a 
href="(?P<url>[^ ]+?)"(?! class="image")|<span class="dangerousLink"><a 
href="(?P<url2>.+?)"', re.DOTALL)
@@ -2918,7 +2918,7 @@
             'prop'      :'imageinfo',
             'titles'    :self.title(),
             }
-        data = query.GetData(params, encodeTitle = False)
+        data = query.GetData(params, self.site(), encodeTitle = False)
         try:
             # We don't know the page's id, if any other better idea please 
change it
             pageid = data['query']['pages'].keys()[0]
@@ -2940,7 +2940,7 @@
                 'iiprop'    :'sha1',
                 }
             # First of all we need the Hash that identify an image
-            data = query.GetData(params, encodeTitle = False)
+            data = query.GetData(params, self.site(), encodeTitle = False)
             pageid = data['query']['pages'].keys()[0]
             try:
                 hash_found = 
data['query']['pages'][pageid][u'imageinfo'][0][u'sha1']
@@ -5103,6 +5103,7 @@
         """Return the MediaWiki message text for key "key" """
         # Allmessages is retrieved once for all per created Site object
         if not self._mediawiki_messages:
+            api = False
             if verbose:
                 output(
                   u"Retrieving mediawiki messages from Special:Allmessages")
@@ -5110,6 +5111,8 @@
             if self.versionnumber() < 12:
                 usePHP = True
             else:
+                if config.use_api:
+                    api = True
                 usePHP = False
                 elementtree = True
                 try:
@@ -5133,20 +5136,22 @@
 
             retry_idle_time = 1
             while True:
-                if config.use_api and self.versionnumber() >= 12:
+                if api and self.versionnumber() >= 12:
                     params = {
                         'action':'query',
                         'meta':'allmessages',
                     }
                     try:
-                        datas = query.GetData(params)['query']['allmessages']
+                        datas = query.GetData(params, 
self)['query']['allmessages']
+                        self._mediawiki_messages = 
_dict([(tag['name'].lower(), tag['*'])
+                                for tag in datas])
                     except KeyError:
-                        raise ServerError("The APIs don't return data, the 
site may be down")
+                        output('API get messages had some error, retrying by 
ordinary.')
+                        api = False
+                        continue
                     except NotImplementedError:
-                        config.use_api = False
+                        api = False
                         continue
-                    self._mediawiki_messages = _dict([(tag['name'].lower(), 
tag['*'])
-                            for tag in datas])
                 elif usePHP:
                     phppage = 
self.getUrl(self.get_address("Special:Allmessages")
                                       + "&ot=php")
@@ -5522,7 +5527,7 @@
         if leuser is not None: params['leuser'] = leuser
         if letitle is not None: params['letitle'] = letitle
         while True:
-            data = query.GetData(params, encodeTitle = False)
+            data = query.GetData(params, self, encodeTitle = False)
             try:
                 imagesData = data['query']['logevents']
             except KeyError:
@@ -5590,7 +5595,7 @@
         if rcshow is not None: params['rcshow'] = rcshow
         if rctype is not None: params['rctype'] = rctype
         while True:
-            data = query.GetData(params, encodeTitle = False)
+            data = query.GetData(params, self, encodeTitle = False)
             try:
                 rcData = data['query']['recentchanges']
             except KeyError:
@@ -5726,7 +5731,7 @@
                 'rnlimit': '1',
                 #'': '',
             }
-            data = query.GetData(params)
+            data = query.GetData(params, self)
             return Page(self, data['query']['random'][0]['title'])
         else:
             """Yield random page via Special:Random"""
@@ -5744,7 +5749,7 @@
                 'rnlimit': '1',
                 'rnredirect': '1',
             }
-            data = query.GetData(params)
+            data = query.GetData(params, self)
             return Page(self, data['query']['random'][0]['title'])
         else:
             """Yield random redirect page via Special:RandomRedirect."""
@@ -5798,7 +5803,7 @@
             params['apfrom'] = start
             if throttle:
                 get_throttle()
-            data = query.GetData(params)
+            data = query.GetData(params, self)
             
             for p in data['query']['allpages']:
                 yield Page(self, p['title'])
@@ -5990,7 +5995,7 @@
                     'euquery': url,
                 }
                 while True:
-                    data = query.GetData(params)
+                    data = query.GetData(params, self)
                     if data['query']['exturlusage'] == []:
                         break
                     for pages in data['query']['exturlusage']:
@@ -6606,7 +6611,7 @@
             'list'      :'allimages',
             'aisha1'    :hash_found,
         }
-        allimages = query.GetData(params, getSite(self.lang, self.family), 
encodeTitle = False)['query']['allimages']
+        allimages = query.GetData(params, self, encodeTitle = 
False)['query']['allimages']
         files = list()
         for imagedata in allimages:
             image = imagedata[u'name']



_______________________________________________
Pywikipedia-svn mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-svn

Reply via email to