Revision: 7659
Author:   alexsh
Date:     2009-11-16 19:36:58 +0000 (Mon, 16 Nov 2009)

Log Message:
-----------
wikipedia.py:
* new U2RedirectHandler(): fixes urllib2's automatic following of the HTTP 302 target location.
* return cookie data to the caller when an HTTP 302 redirect occurs
login.py:
* fix ordinary login failure when the MediaWiki login redirects to the main page (tested
on Wikia).

Modified Paths:
--------------
    trunk/pywikipedia/login.py
    trunk/pywikipedia/wikipedia.py

Modified: trunk/pywikipedia/login.py
===================================================================
--- trunk/pywikipedia/login.py  2009-11-16 16:59:12 UTC (rev 7658)
+++ trunk/pywikipedia/login.py  2009-11-16 19:36:58 UTC (rev 7659)
@@ -143,11 +143,12 @@
             predata = {
                 "wpName": self.username.encode(self.site.encoding()),
                 "wpPassword": self.password,
-                "wpDomain": self.site.family.ldapDomain,     # VistaPrint fix
+                "wpLoginattempt": "Aanmelden & Inschrijven", # dutch button label seems to work for all wikis
                 "wpRemember": str(int(bool(remember))),
                 "wpSkipCookieCheck": '1'
             }
+            if self.site.family.ldapDomain:     # VistaPrint fix
+                predata["wpDomain"] = self.site.family.ldapDomain
             if captcha:
                 predata["wpCaptchaId"] = captcha['id']
                 predata["wpCaptchaWord"] = captcha['answer']
@@ -182,7 +183,11 @@
         Reat=re.compile(': (.*?)=(.*?);')
     
         L = {}
-        for eat in response.info().getallmatchingheaders('set-cookie'):
+        if hasattr(response, 'sheaders'):
+            ck = response.sheaders
+        else:
+            ck = response.info().getallmatchingheaders('set-cookie')
+        for eat in ck:
             m = Reat.search(eat)
             if m:
                 L[m.group(1)] = m.group(2)

Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py      2009-11-16 16:59:12 UTC (rev 7658)
+++ trunk/pywikipedia/wikipedia.py      2009-11-16 19:36:58 UTC (rev 7659)
@@ -5520,10 +5520,14 @@
                 raise
         
         # check cookies return or not, if return, send its to update.
-        if f.info().getallmatchingheaders('set-cookie'):
+        if hasattr(f, 'sheaders'):
+            ck = f.sheaders
+        else:
+            ck = f.info().getallmatchingheaders('set-cookie')
+        if ck:
             Reat=re.compile(': (.*?)=(.*?);')
             tmpc = {}
-            for d in f.info().getallmatchingheaders('set-cookie'):
+            for d in ck:
                 m = Reat.search(d)
                 if m: tmpc[m.group(1)] = m.group(2)
             if self.cookies(sysop):
@@ -5639,10 +5643,14 @@
                 
                 raise
         # check cookies return or not, if return, send its to update.
-        if not no_hostname and f.info().getallmatchingheaders('set-cookie'):
+        if hasattr(f, 'sheaders'):
+            ck = f.sheaders
+        else:
+            ck = f.info().getallmatchingheaders('set-cookie')
+        if not no_hostname and ck:
             Reat=re.compile(': (.*?)=(.*?);')
             tmpc = {}
-            for d in f.info().getallmatchingheaders('set-cookie'):
+            for d in ck:
                 m = Reat.search(d)
                 if m: tmpc[m.group(1)] = m.group(2)
             self.updateCookies(tmpc, sysop)
@@ -8211,6 +8219,22 @@
     s = time.strptime(tz, "%Y-%m-%dT%H:%M:%SZ")
     return int(time.strftime("%Y%m%d%H%M%S", s))
 
+#Redirect Handler for urllib2
+class U2RedirectHandler(urllib2.HTTPRedirectHandler):
+    def http_error_301(self, req, fp, code, msg, headers):
+        result = urllib2.HTTPRedirectHandler.http_error_301(
+            self, req, fp, code, msg, headers)
+        result.code = code
+        result.sheaders = [v for v in headers.__str__().split('\n') if v.startswith('Set-Cookie:')]
+        return result
+
+    def http_error_302(self, req, fp, code, msg, headers):
+        result = urllib2.HTTPRedirectHandler.http_error_302(
+            self, req, fp, code, msg, headers)
+        result.code = code
+        result.sheaders = [v for v in headers.__str__().split('\n') if v.startswith('Set-Cookie:')]
+        return result
+
 # Site Cookies handler
 COOKIEFILE = config.datafilepath('login-data', 'cookies.lwp')
 cj = cookielib.LWPCookieJar()
@@ -8218,18 +8242,20 @@
     cj.load(COOKIEFILE)
 
 cookieProcessor = urllib2.HTTPCookieProcessor(cj)
-MyURLopener = urllib2.build_opener()
 
+
+MyURLopener = urllib2.build_opener(U2RedirectHandler)
+
 if config.proxy['host']:
    proxyHandler = urllib2.ProxyHandler({'http':'http://%s/' % config.proxy['host'] })
     
-    MyURLopener = urllib2.build_opener(proxyHandler)
+    MyURLopener.add_handler(proxyHandler)
     if config.proxy['auth']:
         proxyAuth = urllib2.HTTPPasswordMgrWithDefaultRealm()
        proxyAuth.add_password(None, config.proxy['host'], config.proxy['auth'][0], config.proxy['auth'][1])
         proxyAuthHandler = urllib2.ProxyBasicAuthHandler(proxyAuth)
         
-        MyURLopener = urllib2.build_opener(proxyHandler, proxyAuthHandler)
+        MyURLopener.add_handler(proxyAuthHandler)
 
 if config.authenticate:
     passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
@@ -8237,14 +8263,12 @@
        passman.add_password(None, site, config.authenticate[site][0], config.authenticate[site][1])
     authhandler = urllib2.HTTPBasicAuthHandler(passman)
 
-    MyURLopener = urllib2.build_opener(authhandler)
-    if config.proxy['host']:
-        MyURLopener = urllib2.build_opener(authhandler, proxyHandler)
-        if config.proxy['auth']:
-            MyURLopener = urllib2.build_opener(authhandler, proxyHandler, proxyAuthHandler)
+    MyURLopener.add_handler(authhandler)
 
+
 if __name__ == '__main__':
     import doctest
     print 'Pywikipediabot %s' % version.getversion()
     print 'Python %s' % sys.version
     doctest.testmod()
+



_______________________________________________
Pywikipedia-svn mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/pywikipedia-svn

Reply via email to