Revision: 6414
Author:   filnik
Date:     2009-02-22 18:12:08 +0000 (Sun, 22 Feb 2009)

Log Message:
-----------
Bugfix for the report; otherwise it is impossible to strike the already-seen images.

Modified Paths:
--------------
    trunk/pywikipedia/checkimages.py

Modified: trunk/pywikipedia/checkimages.py
===================================================================
--- trunk/pywikipedia/checkimages.py    2009-02-22 16:21:11 UTC (rev 6413)
+++ trunk/pywikipedia/checkimages.py    2009-02-22 18:12:08 UTC (rev 6414)
@@ -799,7 +799,7 @@
         """ Checking if the file is on commons """
         wikipedia.output(u'Checking if %s is on commons...' % self.imageName)
         commons_site = wikipedia.getSite('commons', 'commons')
-        regexOnCommons = r"\n\*\[\[:File:%s\]\] is also on '''Commons''': \[\[commons:File:.*?\]\](?: \(same name\)|)$" % re.escape(self.imageName)
+        regexOnCommons = r"\[\[:File:%s\]\] is also on '''Commons''': \[\[commons:File:.*?\]\](?: \(same name\)|)$" % re.escape(self.imageName)
         hash_found = self.image.getHash()
         if hash_found == None:
             return False # Problems? Yes! Image deleted, no hash found. Skip the image.
@@ -841,7 +841,7 @@
         dupTalkText = wikipedia.translate(self.site, duplicates_user_talk_text)
         dupComment_talk = wikipedia.translate(self.site, duplicates_comment_talk)
         dupComment_image = wikipedia.translate(self.site, duplicates_comment_image)
-        duplicateRegex = r'\n\*(?:\[\[:File:%s\]\] has the following duplicates(?: \(\'\'\'forced mode\'\'\'\)|):|\*\[\[:File:%s\]\])$' % (re.escape(self.convert_to_url(self.imageName)), re.escape(self.convert_to_url(self.imageName)))
+        duplicateRegex = r'(?:\[\[:File:%s\]\] has the following duplicates(?: \(\'\'\'forced mode\'\'\'\)|):|\*\[\[:File:%s\]\])$' % (re.escape(self.convert_to_url(self.imageName)), re.escape(self.convert_to_url(self.imageName)))
         imagePage = wikipedia.ImagePage(self.site, u'File:%s' % self.imageName)
         hash_found = imagePage.getHash()
         duplicates = self.site.getFilesFromAnHash(hash_found)
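
Below is a minimal illustration (not part of the commit) of why dropping the leading "\n\*" matters; the image name and report line are made up for the example. If an already-reported entry is checked against a single line of the report (or any text where the entry is not preceded by exactly a newline and a bullet), a pattern starting with "\n\*" can never match, so the existing entry is never recognised and cannot be struck; the relaxed pattern anchors on the entry text itself.

    import re

    imageName = 'Example.jpg'   # illustrative image name, not from the script

    # Old pattern: requires a preceding newline and bullet before the entry.
    old_regexOnCommons = (r"\n\*\[\[:File:%s\]\] is also on '''Commons''': "
                          r"\[\[commons:File:.*?\]\](?: \(same name\)|)$" % re.escape(imageName))
    # New pattern from this revision: anchored on the entry text only.
    new_regexOnCommons = (r"\[\[:File:%s\]\] is also on '''Commons''': "
                          r"\[\[commons:File:.*?\]\](?: \(same name\)|)$" % re.escape(imageName))

    # One already-reported line, as it could be examined on its own.
    report_line = ("*[[:File:Example.jpg]] is also on '''Commons''': "
                   "[[commons:File:Example.jpg]] (same name)")

    print(re.search(old_regexOnCommons, report_line))  # None: '\n\*' cannot occur inside a single line
    print(re.search(new_regexOnCommons, report_line))  # match object: entry found, so it can be struck

The duplicateRegex change in the second hunk follows the same idea for the duplicates report.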



