jenkins-bot has submitted this change and it was merged.

Change subject: http.request signature
......................................................................


http.request signature

Add method, body and headers as explicit arguments of http.request.
These are the standard arguments of httplib2's request method, and
other http packages have similar arguments.
Including them as explicit arguments makes it clearer what the function
is capable of, allows static checking of invocations, and forces these
parameters into the *args of threadedhttp.HttpRequest() instead of kwargs.
It also allows *args to be removed from the http.request function signature,
as it is no longer necessary.

Also deprecate using http.request for non-site requests.

Change-Id: Id4008cd470b224ffcd3c0b894bba90a25e7611bd
---
M pywikibot/comms/http.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/data/wikistats.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/version.py
7 files changed, 35 insertions(+), 37 deletions(-)

Approvals:
  John Vandenberg: Looks good to me, but someone else must approve
  XZise: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 4c2b2ec..011eb59 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -21,9 +21,12 @@
 __version__ = '$Id$'
 __docformat__ = 'epytext'
 
-import sys
 import atexit
+import sys
 import time
+
+from distutils.version import StrictVersion
+from warnings import warn
 
 # Verify that a working httplib2 is present.
 try:
@@ -32,7 +35,6 @@
     print("Error: Python module httplib2 >= 0.6.0 is required.")
     sys.exit(1)
 
-from distutils.version import StrictVersion
 # httplib2 0.6.0 was released with __version__ as '$Rev$'
 #                and no module variable CA_CERTS.
 if httplib2.__version__ == '$Rev$' and 'CA_CERTS' not in httplib2.__dict__:
@@ -220,7 +222,8 @@
 
 
 @deprecate_arg('ssl', None)
-def request(site=None, uri=None, charset=None, *args, **kwargs):
+def request(site=None, uri=None, method='GET', body=None, headers=None,
+            **kwargs):
     """
     Request to Site with default error handling and response decoding.
 
@@ -244,9 +247,9 @@
     """
     assert(site or uri)
     if not site:
-        # TODO: deprecate this usage, once the library code has been
-        # migrated to using the other request methods.
-        r = fetch(uri, *args, **kwargs)
+        warn('Invoking http.request without argument site is deprecated. '
+             'Use http.fetch.', DeprecationWarning, 2)
+        r = fetch(uri, method, body, headers, **kwargs)
         return r.content
 
     baseuri = site.base_url(uri)
@@ -254,11 +257,15 @@
     kwargs.setdefault("disable_ssl_certificate_validation",
                       site.ignore_certificate_error())
 
-    format_string = kwargs.setdefault("headers", {}).get("user-agent")
-    kwargs["headers"]["user-agent"] = user_agent(site, format_string)
-    kwargs['charset'] = charset
+    if not headers:
+        headers = {}
+        format_string = None
+    else:
+        format_string = headers.get('user-agent', None)
 
-    r = fetch(baseuri, *args, **kwargs)
+    headers['user-agent'] = user_agent(site, format_string)
+
+    r = fetch(baseuri, method, body, headers, **kwargs)
     return r.content
 
 
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index b57a61d..a71c17c 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1550,8 +1550,8 @@
                         body = paramstring
 
                 rawdata = http.request(
-                    self.site, uri, method='GET' if use_get else 'POST',
-                    headers=headers, body=body)
+                    site=self.site, uri=uri, method='GET' if use_get else 'POST',
+                    body=body, headers=headers)
             except Server504Error:
                 pywikibot.log(u"Caught HTTP 504 error; retrying")
                 self.wait()
diff --git a/pywikibot/data/wikidataquery.py b/pywikibot/data/wikidataquery.py
index b78b3e6..fd08839 100644
--- a/pywikibot/data/wikidataquery.py
+++ b/pywikibot/data/wikidataquery.py
@@ -558,13 +558,13 @@
         url = self.getUrl(queryStr)
 
         try:
-            resp = http.request(None, url)
+            resp = http.fetch(url)
         except:
             pywikibot.warning(u"Failed to retrieve %s" % url)
             raise
 
         try:
-            data = json.loads(resp)
+            data = json.loads(resp.content)
         except ValueError:
             pywikibot.warning(u"Data received from host but no JSON could be decoded")
             raise pywikibot.ServerError("Data received from host but no JSON could be decoded")
diff --git a/pywikibot/data/wikistats.py b/pywikibot/data/wikistats.py
index b8f64fe..dee09fd 100644
--- a/pywikibot/data/wikistats.py
+++ b/pywikibot/data/wikistats.py
@@ -22,7 +22,7 @@
             ' falling back to using the larger XML datasets.')
         csv = None
 
-from pywikibot.comms import threadedhttp
+from pywikibot.comms import http
 
 
 class WikiStats(object):
@@ -110,11 +110,8 @@
         if table in self.FAMILY_MAPPING:
             table = self.FAMILY_MAPPING[table]
 
-        o = threadedhttp.Http()
-        r = o.request(uri=URL % (table, format))
-        if isinstance(r, Exception):
-            raise r
-        return r[1]
+        r = http.fetch(URL % (table, format))
+        return r.raw
 
     def raw_cached(self, table, format):
         """
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 0cdcbc8..25b0892 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -31,12 +31,10 @@
     long = int
     from html import entities as htmlentitydefs
     from urllib.parse import quote_from_bytes, unquote_to_bytes
-    from urllib.request import urlopen
 else:
     chr = unichr  # noqa
     import htmlentitydefs
     from urllib import quote as quote_from_bytes, unquote as unquote_to_bytes
-    from urllib import urlopen
 
 import pywikibot
 
@@ -2104,14 +2102,11 @@
     @deprecated("FilePage.latest_file_info.sha1")
     def getFileMd5Sum(self):
         """Return image file's MD5 checksum."""
-        # FIXME: MD5 might be performed on incomplete file due to server disconnection
-        # (see bug #1795683).
-        f = urlopen(self.fileUrl())
         # TODO: check whether this needs a User-Agent header added
+        req = http.fetch(self.fileUrl())
         h = hashlib.md5()
-        h.update(f.read())
+        h.update(req.raw)
         md5Checksum = h.hexdigest()
-        f.close()
         return md5Checksum
 
     @deprecated("FilePage.latest_file_info.sha1")
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index b01ae3f..f797874 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -2013,7 +2013,7 @@
     else:
         wiki = 'wikilang=%s&wikifam=.%s' % (lang, project)
     link = '%s&%s&max=%d&order=img_timestamp' % (URL, wiki, limit)
-    results = re.findall(REGEXP, http.request(site=None, uri=link))
+    results = re.findall(REGEXP, http.fetch(link))
     if not results:
         raise pywikibot.Error(
             u'Nothing found at %s! Try to use the tool by yourself to be sure '
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 527f210..5063908 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -144,11 +144,12 @@
     from pywikibot.comms import http
 
     uri = 'https://github.com/wikimedia/%s/!svn/vcc/default' % tag
-    data = http.request(site=None, uri=uri, method='PROPFIND',
-                        body="<?xml version='1.0' encoding='utf-8'?>"
-                        "<propfind xmlns=\"DAV:\"><allprop/></propfind>",
+                        headers={'label': str(rev), 'user-agent': 'SVN/1.7.5 {pwb}'})
-
+    request = http.fetch(uri=uri, method='PROPFIND',
+                         body="<?xml version='1.0' encoding='utf-8'?>"
+                              "<propfind xmlns=\"DAV:\"><allprop/></propfind>",
+                         headers={'label': str(rev),
+                                  'user-agent': 'SVN/1.7.5 {pwb}'})
+    data = request.content
     dom = xml.dom.minidom.parse(StringIO(data))
     hsh = dom.getElementsByTagName("C:git-commit")[0].firstChild.nodeValue
     return hsh
@@ -240,14 +241,12 @@
     from pywikibot.comms import http
 
     url = repo or 'https://git.wikimedia.org/feed/pywikibot/core'
-    hsh = None
-    buf = http.request(site=None, uri=url)
-    buf = buf.split('\r\n')
+    buf = http.fetch(url).content.splitlines()
     try:
         hsh = buf[13].split('/')[5][:-1]
+        return hsh
     except Exception as e:
         raise ParseError(repr(e) + ' while parsing ' + repr(buf))
-    return hsh
 
 
 @deprecated('get_module_version, get_module_filename and get_module_mtime')

-- 
To view, visit https://gerrit.wikimedia.org/r/170054
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Id4008cd470b224ffcd3c0b894bba90a25e7611bd
Gerrit-PatchSet: 8
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <[email protected]>
Gerrit-Reviewer: John Vandenberg <[email protected]>
Gerrit-Reviewer: Ladsgroup <[email protected]>
Gerrit-Reviewer: Merlijn van Deen <[email protected]>
Gerrit-Reviewer: XZise <[email protected]>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to