http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11327

Revision: 11327
Author:   xqt
Date:     2013-04-03 19:50:12 +0000 (Wed, 03 Apr 2013)
Log Message:
-----------
extract_templates_and_params() call with asDict=True;
strip trailing white space, some PEP8 changes

Modified Paths:
--------------
    trunk/pywikipedia/subster.py

Modified: trunk/pywikipedia/subster.py
===================================================================
--- trunk/pywikipedia/subster.py        2013-04-03 19:39:46 UTC (rev 11326)
+++ trunk/pywikipedia/subster.py        2013-04-03 19:50:12 UTC (rev 11327)
@@ -267,7 +267,7 @@
         for item in params:
             # 1st stage: main/general content substitution
             # 1.) - 5.) subst templates
-            metadata = { 'bot-error':           unicode(False), 
+            metadata = { 'bot-error':           unicode(False),
                          'bot-error-traceback': u'', }  # DRTRIGON-132
             try:
             (substed_content, tags, md) = self.subTemplate(substed_content, item)
@@ -318,16 +318,18 @@
 
           Returns a tuple containing the new content with tags
            substituted and a list of those tags.
+
         """
 
         substed_tags = []  # DRTRIGON-73
-        metadata     = { 'mw-signature': u'~~~~', 
-                         'mw-timestamp': u'~~~~~', }  # DRTRIGON-132
+        metadata     = {'mw-signature': u'~~~~',
+                        'mw-timestamp': u'~~~~~',}  # DRTRIGON-132
 
         # 0.2.) check for 'simple' mode and get additional params
         if param['simple']:
             p = self.site.getExpandedString(param['simple'])
-            param.update( pywikibot.extract_templates_and_params(p)[0][1] )
+            param.update(
+                pywikibot.extract_templates_and_params(p, asDict=True)[0][1])
 
         # 0.5.) check cron/date
         if param['cron']:
@@ -337,9 +339,14 @@
                 param['cron'] = '* * ' + param['cron']
             entry = crontab.CronTab(param['cron'])
             # find the delay from midnight (does not return 0.0 - but next)
-            delay = entry.next(datetime.datetime.now().replace(hour=0,minute=0,second=0,microsecond=0)-datetime.timedelta(microseconds=1))
+            delay = entry.next(datetime.datetime.now().replace(hour=0,
+                                                               minute=0,
+                                                               second=0,
+                                                               microsecond=0)- \
+                               datetime.timedelta(microseconds=1))
 
-            pywikibot.output(u'CRON delay for execution: %.3f (<= %i)' % (delay, bot_config['CRONMaxDelay']))
+            pywikibot.output(u'CRON delay for execution: %.3f (<= %i)'
+                             % (delay, bot_config['CRONMaxDelay']))
 
             if not (delay <= bot_config['CRONMaxDelay']):
                 return (content, substed_tags, metadata)
@@ -348,7 +355,8 @@
         # (security: check url not to point to a local file on the server,
         #  e.g. 'file://' - same as used in xsalt.py)
         secure = False
-        for item in [u'http://', u'https://', u'mail://', u'local://', u'wiki://']:
+        for item in [u'http://', u'https://', u'mail://', u'local://',
+                     u'wiki://']:
             secure = secure or (param['url'][:len(item)] == item)
         param['zip'] = ast.literal_eval(param['zip'])
         if not secure:
@@ -356,19 +364,24 @@
         if   (param['url'][:7] == u'wiki://'):
             url = param['url'][7:].strip('[]')              # enable wiki-links
             if ast.literal_eval(param['expandtemplates']):  # DRTRIGON-93 (only with 'wiki://')
-                external_buffer = pywikibot.Page(self.site, url).get(expandtemplates=True)
+                external_buffer = pywikibot.Page(self.site,
+                                                 url).get(expandtemplates=True)
             else:
                 external_buffer = self.load( pywikibot.Page(self.site, url) )
         elif (param['url'][:7] == u'mail://'):              # DRTRIGON-101
             url = param['url'].replace(u'{{@}}', u'@')     # e.g. nlwiki
-            mbox = SubsterMailbox(pywikibot.config.datafilepath(bot_config['data_path'], bot_config['mbox_file'], ''))
+            mbox = SubsterMailbox(
+                pywikibot.config.datafilepath(bot_config['data_path'],
+                                              bot_config['mbox_file'], ''))
             external_buffer = mbox.find_data(url)
             mbox.close()
         elif (param['url'][:8] == u'local://'):             # DRTRIGON-131
             if (param['url'][8:] == u'cache/state_bots'):
                 # filename hard-coded
-                d = shelve.open(pywikibot.config.datafilepath('cache', 'state_bots'))
-                external_buffer = pprint.pformat(ast.literal_eval(pprint.pformat(d)))
+                d = shelve.open(pywikibot.config.datafilepath('cache',
+                                                              'state_bots'))
+                external_buffer = pprint.pformat(
+                    ast.literal_eval(pprint.pformat(d)))
                 d.close()
             else:
                 external_buffer = u'n/a'
@@ -378,12 +391,13 @@
             # on page, if the user placed them, else use the conventional mode.
             # http://www.diveintopython.net/http_web_services/etags.html
             f_url, external_buffer = http.request(self.site, param['url'],
-                                                  no_hostname = True, 
+                                                  no_hostname = True,
                                                   back_response = True)
             headers = f_url.headers # same like 'f_url.info()'
             #if param['zip']:
             if ('text/' not in headers['content-type']):
-                pywikibot.output(u'Source is of non-text content-type, using raw data instead.')
+                pywikibot.output(u'Source is of non-text content-type, '
+                                 u'using raw data instead.')
                 external_buffer = f_url.read()
             del f_url               # free some memory (no need to keep copy)
 
@@ -413,7 +427,8 @@
             if external_data:    # not None
                 external_data = external_data.groups()
 
-                pywikibot.output(u'Groups found by regex: %i' % len(external_data))
+                pywikibot.output(u'Groups found by regex: %i'
+                                 % len(external_data))
 
                 # DRTRIGON-114: Support for named groups in regexs
                 if regex.groupindex:
@@ -474,7 +489,7 @@
 
     def subTag(self, content, value, external_data=u'~~~~', count=1):
         """Substitute one single tag (of a template) in content.
-        
+
            Can also be (ab)used to check for presence of a tag.
         """
         substed_tags = []


_______________________________________________
Pywikipedia-svn mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-svn

Reply via email to