Hello community,
here is the log from the commit of package python-mechanize for
openSUSE:Factory checked in at 2020-01-22 22:46:38
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-mechanize (Old)
and /work/SRC/openSUSE:Factory/.python-mechanize.new.26092 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-mechanize"
Wed Jan 22 22:46:38 2020 rev:25 rq:766371 version:0.4.5
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-mechanize/python-mechanize.changes 2019-11-13 13:27:17.743633132 +0100
+++ /work/SRC/openSUSE:Factory/.python-mechanize.new.26092/python-mechanize.changes 2020-01-22 22:47:06.680601019 +0100
@@ -1,0 +2,13 @@
+Wed Jan 22 15:45:36 UTC 2020 - Martin Sirringhaus <[email protected]>
+
+- Update to 0.4.5:
+ * Add a set_html() method to the browser object
+
+- Update to 0.4.4:
+  * URLs passed into mechanize now automatically have URL-unsafe
+    characters percent encoded. This is necessary because newer
+    versions of Python disallow processing of URLs with unsafe
+    characters. Note that this means values returned by
+    get_full_url(), get_selector() etc. will be percent encoded.
+
+-------------------------------------------------------------------
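
As a quick illustration of the 0.4.4 change above (a sketch, assuming mechanize >= 0.4.4; the expected values mirror the updated test_urllib2.py tests further down in this diff):

  import mechanize

  # URL-unsafe characters are now percent encoded automatically, so the
  # values returned by get_full_url() and get_selector() come back encoded.
  req = mechanize.Request("http://www.python.org/~jeremy/")
  print(req.get_full_url())   # http://www.python.org/%7Ejeremy/
  print(req.get_selector())   # /%7Ejeremy/
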
Old:
----
mechanize-0.4.3.tar.gz
New:
----
mechanize-0.4.5.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-mechanize.spec ++++++
--- /var/tmp/diff_new_pack.0gpiva/_old 2020-01-22 22:47:07.768601567 +0100
+++ /var/tmp/diff_new_pack.0gpiva/_new 2020-01-22 22:47:07.772601569 +0100
@@ -1,7 +1,7 @@
#
# spec file for package python-mechanize
#
-# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2020 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
%define modname mechanize
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
Name: python-mechanize
-Version: 0.4.3
+Version: 0.4.5
Release: 0
Summary: Stateful programmatic web browsing
License: (BSD-3-Clause OR ZPL-2.1) AND BSD-3-Clause
++++++ mechanize-0.4.3.tar.gz -> mechanize-0.4.5.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/ChangeLog new/mechanize-0.4.5/ChangeLog
--- old/mechanize-0.4.3/ChangeLog 2019-08-18 10:06:45.000000000 +0200
+++ new/mechanize-0.4.5/ChangeLog 2019-12-22 10:17:25.000000000 +0100
@@ -1,6 +1,17 @@
This isn't really in proper GNU ChangeLog format, it just happens to
look that way.
+2019-12-22 Kovid Goyal
+ * 0.4.5 release
+ * Add a set_html() method to the browser object
+
+2019-11-07 Kovid Goyal
+ * 0.4.4 release
+ * URLs passed into mechanize now automatically have URL unsafe characters
+ percent encoded. This is necessary because newer versions of Python
+ disallow processing of URLs with unsafe characters. Note that this means
+ values returned by get_full_url(), get_selector() etc. will be percent encoded.
+
2019-08-18 Kovid Goyal
* 0.4.3 release
* When filling forms with unicode strings automatically encode them into
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/PKG-INFO new/mechanize-0.4.5/PKG-INFO
--- old/mechanize-0.4.3/PKG-INFO 2019-08-18 10:08:08.000000000 +0200
+++ new/mechanize-0.4.5/PKG-INFO 2019-12-22 10:18:31.393247600 +0100
@@ -1,12 +1,12 @@
Metadata-Version: 2.1
Name: mechanize
-Version: 0.4.3
+Version: 0.4.5
Summary: Stateful programmatic web browsing.
Home-page: https://github.com/python-mechanize/mechanize
Author: Kovid Goyal
Author-email: [email protected]
License: BSD
-Download-URL: https://pypi.python.org/packages/source/m/mechanize/mechanize-0.4.3.tar.gz
+Download-URL: https://pypi.python.org/packages/source/m/mechanize/mechanize-0.4.5.tar.gz
Description: Stateful programmatic web browsing, after Andy Lester's Perl module
WWW::Mechanize.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/README.rst new/mechanize-0.4.5/README.rst
--- old/mechanize-0.4.3/README.rst 2019-02-14 09:39:32.000000000 +0100
+++ new/mechanize-0.4.5/README.rst 2019-09-05 10:00:47.000000000 +0200
@@ -85,6 +85,6 @@
:target: https://pypi.python.org/pypi/mechanize
:alt: Latest version released on PyPi
-.. |build| image:: https://dev.azure.com/kovidgoyal/mechanize/_apis/build/status/python-mechanize.mechanize?branchName=master
-   :target: https://dev.azure.com/kovidgoyal/mechanize/_build/latest?definitionId=1&branchName=master
+.. |build| image:: https://dev.azure.com/divok/mechanize/_apis/build/status/python-mechanize.mechanize?branchName=master
+   :target: https://dev.azure.com/divok/mechanize/_build/latest?definitionId=3&branchName=master
:alt: Build status of the master branch
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_entities.py new/mechanize-0.4.5/mechanize/_entities.py
--- old/mechanize-0.4.3/mechanize/_entities.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/mechanize/_entities.py 2019-09-02 11:36:37.000000000 +0200
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
-# License: BSD Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
+# Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (absolute_import, division, print_function,
unicode_literals)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_equiv.py new/mechanize-0.4.5/mechanize/_equiv.py
--- old/mechanize-0.4.3/mechanize/_equiv.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/mechanize/_equiv.py 2019-09-02 11:36:29.000000000 +0200
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
-# License: BSD Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
+# Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (absolute_import, division, print_function,
unicode_literals)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_headersutil.py new/mechanize-0.4.5/mechanize/_headersutil.py
--- old/mechanize-0.4.3/mechanize/_headersutil.py 2019-04-16 11:16:55.000000000 +0200
+++ new/mechanize-0.4.5/mechanize/_headersutil.py 2019-12-22 04:27:48.000000000 +0100
@@ -20,6 +20,8 @@
def is_html_file_extension(url, allow_xhtml):
+ if url is None:
+ return False
ext = os.path.splitext(_rfc3986.urlsplit(url)[2])[1]
html_exts = [".htm", ".html"]
if allow_xhtml:
@@ -27,7 +29,7 @@
return ext in html_exts
-def is_html(ct_headers, url, allow_xhtml=False):
+def is_html(ct_headers, url=None, allow_xhtml=False):
"""
ct_headers: Sequence of Content-Type headers
url: Response URL
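
A minimal sketch of the relaxed is_html() signature above (mechanize._headersutil is an internal helper module, so this is illustrative only):

  from mechanize._headersutil import is_html

  # url may now be omitted or None; detection then relies on the
  # Content-Type headers alone.
  print(is_html(["text/html; charset=utf-8"]))  # True
  print(is_html([], url=None))                  # False: no headers, no url
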
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_mechanize.py new/mechanize-0.4.5/mechanize/_mechanize.py
--- old/mechanize-0.4.3/mechanize/_mechanize.py 2019-08-18 10:06:45.000000000 +0200
+++ new/mechanize-0.4.5/mechanize/_mechanize.py 2019-12-22 04:27:48.000000000 +0100
@@ -20,6 +20,7 @@
from ._html import Factory
from ._useragent import UserAgentBase
from .polyglot import pathname2url, HTTPError, is_string, iteritems
+from ._response import make_response
class BrowserStateError(Exception):
@@ -120,6 +121,8 @@
received html/xhtml content. See the builtin
:func:`mechanize._html.content_parser()` function for details
on the interface this function must support.
+ :param factory_class: HTML Factory class to use. Defaults to
+ :class:`mechanize.Factory`
"""
@@ -133,6 +136,7 @@
history=None,
request_class=None,
content_parser=None,
+ factory_class=Factory,
allow_xhtml=False, ):
"""
Only named arguments should be passed to this constructor.
@@ -147,7 +151,7 @@
if request_class is None:
request_class = _request.Request
- factory = Factory(allow_xhtml=allow_xhtml)
+ factory = factory_class(allow_xhtml=allow_xhtml)
factory.set_request_class(request_class)
if content_parser is not None:
factory.set_content_parser(content_parser)
@@ -380,6 +384,15 @@
# fails
self.request = request
+ def set_html(self, html, url="http://example.com/"):
+ """Set the response to dummy with given HTML, and URL if given.
+
+ Allows you to then parse that HTML, especially to extract forms
+ information. If no URL was given then the default is "example.com".
+ """
+ response = make_response(html, [("Content-type", "text/html")], url)
+ self._set_response(response, True)
+
def geturl(self):
"""Get URL of current document."""
if self._response is None:
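
A short sketch of the new Browser.set_html() added above; the url argument is optional and defaults to "http://example.com/":

  import mechanize

  br = mechanize.Browser()
  # Load HTML from a string without any network request, then use the
  # usual form machinery on it.
  br.set_html("""\
  <html><body>
  <form><input type="text" name="q" value="initial"/></form>
  </body></html>""")
  br.select_form(nr=0)
  print(br.form["q"])  # 'initial'

The same hunk also adds a factory_class keyword to the Browser constructor, so a mechanize.Factory subclass can be supplied in place of the default factory.
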
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_response.py new/mechanize-0.4.5/mechanize/_response.py
--- old/mechanize-0.4.3/mechanize/_response.py 2019-04-16 11:16:55.000000000 +0200
+++ new/mechanize-0.4.5/mechanize/_response.py 2019-12-22 04:27:48.000000000 +0100
@@ -426,7 +426,7 @@
def test_response(data='test data',
headers=[],
- url="http://example.com/",
+ url=None,
code=200,
msg="OK"):
return make_response(data, headers, url, code, msg)
@@ -434,14 +434,14 @@
def test_html_response(data='test data',
headers=[],
- url="http://example.com/",
+ url=None,
code=200,
msg="OK"):
headers += [("Content-type", "text/html")]
return make_response(data, headers, url, code, msg)
-def make_response(data, headers, url, code, msg):
+def make_response(data, headers, url=None, code=200, msg="OK"):
"""Convenient factory for objects implementing response interface.
data: string containing response body data
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_urllib2_fork.py new/mechanize-0.4.5/mechanize/_urllib2_fork.py
--- old/mechanize-0.4.3/mechanize/_urllib2_fork.py 2019-04-16 11:16:55.000000000 +0200
+++ new/mechanize-0.4.5/mechanize/_urllib2_fork.py 2019-11-06 15:58:03.000000000 +0100
@@ -44,7 +44,7 @@
import time
from collections import OrderedDict
from functools import partial
-from io import BytesIO, BufferedReader
+from io import BufferedReader, BytesIO
from . import _rfc3986
from ._clientcookie import CookieJar
@@ -57,7 +57,7 @@
splitpasswd, splitport, splittype, splituser,
splitvalue, unquote, unwrap, url2pathname,
urllib_proxy_bypass, urllib_splithost, urlparse,
- urlsplit)
+ urlsplit, urlunparse)
def sha1_digest(data):
@@ -133,12 +133,63 @@
return host.lower()
+PERCENT_RE = re.compile(b"%[a-fA-F0-9]{2}")
+ZONE_ID_CHARS = set(bytearray(
+ b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" b"abcdefghijklmnopqrstuvwxyz" b"0123456789._!-"
+))
+USERINFO_CHARS = ZONE_ID_CHARS | set(bytearray(b"$&'()*+,;=:"))
+PATH_CHARS = USERINFO_CHARS | set(bytearray(b'@/'))
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {ord(b"?")}
+
+
+def fix_invalid_bytes_in_url_component(component, allowed_chars=PATH_CHARS):
+ if not component:
+ return component
+ is_bytes = isinstance(component, bytes)
+ if not is_bytes:
+ component = component.encode('utf-8', 'surrogatepass')
+ percent_encodings = PERCENT_RE.findall(component)
+ for enc in percent_encodings:
+ if not enc.isupper():
+ component = component.replace(enc, enc.upper())
+ is_percent_encoded = len(percent_encodings) == component.count(b"%")
+ encoded_component = bytearray()
+ percent = ord('%')
+ for byte_ord in bytearray(component):
+ if (is_percent_encoded and byte_ord == percent) or (byte_ord < 128 and byte_ord in allowed_chars):
+ encoded_component.append(byte_ord)
+ continue
+ encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+ encoded_component = bytes(encoded_component)
+ if not is_bytes:
+ encoded_component = encoded_component.decode('utf-8')
+ return encoded_component
+
+
+def normalize_url(url):
+ parsed = urlparse(url)
+ netloc = parsed.netloc
+ if not isinstance(netloc, bytes) and netloc:
+ def safe_encode(label):
+ try:
+ return label.encode('idna').decode('ascii')
+ except ValueError:
+ return label.encode('ascii', 'replace').decode('ascii')
+ netloc = u'.'.join(map(safe_encode, netloc.split(u'.')))
+
+ return urlunparse(parsed._replace(
+ path=fix_invalid_bytes_in_url_component(parsed.path), netloc=netloc,
+ query=fix_invalid_bytes_in_url_component(parsed.query, QUERY_CHARS),
+ fragment=fix_invalid_bytes_in_url_component(parsed.fragment, FRAGMENT_CHARS),
+ ))
+
+
class Request:
def __init__(self, url, data=None, headers={},
origin_req_host=None, unverifiable=False, method=None):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
- self.__original = unwrap(url)
+ self.__original = normalize_url(unwrap(url))
self.type = None
self._method = method and str(method)
# self.__r_type is what's left after doing the splittype
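
A hedged sketch of the new normalize_url() helper (internal to mechanize._urllib2_fork); the expected values mirror the test_normalize_url() cases added near the end of this diff:

  from mechanize._urllib2_fork import normalize_url

  print(normalize_url('https://simple.com/moo%7Ese'))  # unchanged, already encoded
  print(normalize_url('https://ex.com/Spört'))         # https://ex.com/Sp%C3%B6rt
  print(normalize_url('https://ex.com/Sp%C3%B6rt'))    # unchanged
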
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/_version.py new/mechanize-0.4.5/mechanize/_version.py
--- old/mechanize-0.4.3/mechanize/_version.py 2019-08-18 10:06:45.000000000 +0200
+++ new/mechanize-0.4.5/mechanize/_version.py 2019-12-22 10:17:38.000000000 +0100
@@ -1,2 +1,2 @@
-"0.4.3"
-__version__ = (0, 4, 3, None, None)
+"0.4.5"
+__version__ = (0, 4, 5, None, None)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize/polyglot.py new/mechanize-0.4.5/mechanize/polyglot.py
--- old/mechanize-0.4.3/mechanize/polyglot.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/mechanize/polyglot.py 2019-11-17 11:18:43.000000000 +0100
@@ -1,12 +1,12 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
-# License: GPLv3 Copyright: 2018, Kovid Goyal <kovid at kovidgoyal.net>
+# Copyright: 2018, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (absolute_import, division, print_function,
unicode_literals)
-import sys
import collections
+import sys
is_py2 = sys.version_info.major < 3
@@ -58,12 +58,11 @@
else:
+ import re
from urllib.error import HTTPError, URLError
from urllib.robotparser import RobotFileParser
from urllib.parse import (
- urlsplit, urljoin, urlparse, urlunparse,
- urlencode, quote_plus, splitattr, splithost as urllib_splithost,
- splitpasswd, splitport, splittype, splituser, splitvalue,
+ urlsplit, urljoin, urlparse, urlunparse, urlencode, quote_plus,
unquote, unwrap
)
from urllib.request import (
@@ -81,6 +80,10 @@
MozillaCookieJar, request_host)
from io import StringIO
+ def splitattr(url):
+ words = url.split(';')
+ return words[0], words[1:]
+
def is_string(x):
return isinstance(x, str)
@@ -131,6 +134,66 @@
r.set_proxy(v, k)
return _urlopen(r, *a[1:], **kw)
+ _hostprog = None
+
+ def urllib_splithost(url):
+ """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
+ global _hostprog
+ if _hostprog is None:
+ _hostprog = re.compile('//([^/#?]*)(.*)', re.DOTALL)
+
+ match = _hostprog.match(url)
+ if match:
+ host_port, path = match.groups()
+ if path and path[0] != '/':
+ path = '/' + path
+ return host_port, path
+ return None, url
+
+ _typeprog = None
+
+ def splittype(url):
+ """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
+ global _typeprog
+ if _typeprog is None:
+ _typeprog = re.compile('([^/:]+):(.*)', re.DOTALL)
+
+ match = _typeprog.match(url)
+ if match:
+ scheme, data = match.groups()
+ return scheme.lower(), data
+ return None, url
+
+ def splituser(host):
+ """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]',
'host[:port]'."""
+ user, delim, host = host.rpartition('@')
+ return (user if delim else None), host
+
+ def splitpasswd(user):
+ """splitpasswd('user:passwd') -> 'user', 'passwd'."""
+ user, delim, passwd = user.partition(':')
+ return user, (passwd if delim else None)
+
+ _portprog = None
+
+ def splitport(host):
+ """splitport('host:port') --> 'host', 'port'."""
+ global _portprog
+ if _portprog is None:
+ _portprog = re.compile('(.*):([0-9]*)$', re.DOTALL)
+
+ match = _portprog.match(host)
+ if match:
+ host, port = match.groups()
+ if port:
+ return host, port
+ return host, None
+
+ def splitvalue(attr):
+ """splitvalue('attr=value') --> 'attr', 'value'."""
+ attr, delim, value = attr.partition('=')
+ return attr, (value if delim else None)
+
def as_unicode(x, encoding='utf-8'):
if isinstance(x, bytes):
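
A small sketch of the splitter helpers re-implemented above for Python 3, where urllib.parse no longer exports them; the expected results follow the docstrings:

  from mechanize.polyglot import splittype, splitport, urllib_splithost

  print(splittype('http://example.com/path'))      # ('http', '//example.com/path')
  print(urllib_splithost('//example.com:8080/x'))  # ('example.com:8080', '/x')
  print(splitport('example.com:8080'))             # ('example.com', '8080')
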
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/mechanize.egg-info/PKG-INFO new/mechanize-0.4.5/mechanize.egg-info/PKG-INFO
--- old/mechanize-0.4.3/mechanize.egg-info/PKG-INFO 2019-08-18 10:08:08.000000000 +0200
+++ new/mechanize-0.4.5/mechanize.egg-info/PKG-INFO 2019-12-22 10:18:31.000000000 +0100
@@ -1,12 +1,12 @@
Metadata-Version: 2.1
Name: mechanize
-Version: 0.4.3
+Version: 0.4.5
Summary: Stateful programmatic web browsing.
Home-page: https://github.com/python-mechanize/mechanize
Author: Kovid Goyal
Author-email: [email protected]
License: BSD
-Download-URL: https://pypi.python.org/packages/source/m/mechanize/mechanize-0.4.3.tar.gz
+Download-URL: https://pypi.python.org/packages/source/m/mechanize/mechanize-0.4.5.tar.gz
Description: Stateful programmatic web browsing, after Andy Lester's Perl module
WWW::Mechanize.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/publish.py new/mechanize-0.4.5/publish.py
--- old/mechanize-0.4.3/publish.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/publish.py 2019-09-02 11:36:17.000000000 +0200
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
-# License: BSD Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
+# Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (absolute_import, division, print_function,
unicode_literals)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/test/test_browser.py new/mechanize-0.4.5/test/test_browser.py
--- old/mechanize-0.4.3/test/test_browser.py 2019-01-11 06:06:58.000000000 +0100
+++ new/mechanize-0.4.5/test/test_browser.py 2019-12-22 04:27:48.000000000 +0100
@@ -11,7 +11,6 @@
import mechanize._response
import mechanize._testcase
from mechanize._gzip import HTTPGzipProcessor, compress_readable_output
-from mechanize._response import test_html_response
from mechanize.polyglot import (HTTPConnection, addinfourl, codepoint_to_chr,
create_response_info, iteritems, unicode_type)
@@ -447,13 +446,9 @@
def test_forms(self):
import mechanize
- url = "http://example.com"
b = TestBrowser()
- r = test_html_response(
- url=url,
- headers=[("content-type", "text/html")],
- data="""\
+ b.set_html("""\
<html>
<head><title>Title</title></head>
<body>
@@ -470,8 +465,6 @@
</body>
</html>
""")
- b.add_handler(make_mock_handler()([("http_open", r)]))
- r = b.open(url)
forms = list(b.forms())
self.assertEqual(len(forms), 2)
@@ -1058,7 +1051,6 @@
407, 'Proxy-Authenticate: Basic realm="realm"\r\n\r\n')
test_state(test_one_visit([ph, hh, ah]))
- from mechanize._response import test_response
br = TestBrowser2()
html = b"""\
<html><body>
@@ -1066,22 +1058,20 @@
<form><input type="text" name="b" /></form>
</body></html>
"""
- response = test_response(html, headers=[("Content-type", "text/html")])
self.assertRaises(mechanize.BrowserStateError, br.global_form)
- br.set_response(response)
+ br.set_html(html)
self.assertEqual(str(br.global_form().find_control(nr=0).name), 'a')
self.assertEqual(len(list(br.forms())), 1)
self.assertEqual(str(next(iter(br.forms())).find_control(nr=0).name),
'b')
- from mechanize._response import test_html_response
br = TestBrowser2()
- br.visit_response(test_html_response(b"""\
+ br.set_html("""\
<html><head><title></title></head><body>
<input type="text" name="a" value="b"></input>
<form>
<input type="text" name="p" value="q"></input>
</form>
-</body></html>"""))
+</body></html>""")
def has_a(form):
try:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/test/test_forms.doctest new/mechanize-0.4.5/test/test_forms.doctest
--- old/mechanize-0.4.3/test/test_forms.doctest 2017-03-12 12:02:56.000000000 +0100
+++ new/mechanize-0.4.5/test/test_forms.doctest 2019-12-22 04:27:48.000000000 +0100
@@ -13,8 +13,7 @@
... '<input type="submit" name="s"/></form>' % method
... )
... br = mechanize.Browser()
-... response = test_response(data, [("content-type", "text/html")])
-... br.set_response(response)
+... br.set_html(data, "http://example.com/")
... br.select_form(nr=0)
... forms.append(br.form)
... return forms
@@ -34,13 +33,12 @@
>>> import mechanize
>>> br = mechanize.Browser()
->>> r = mechanize._response.test_html_response("""\
+>>> br.set_html("""\
... <form>
... <input type="text" name="foo" value="a"></input><!!!>
... <input type="text" name="bar" value="b"></input>
... </form>
... """)
->>> br.set_response(r)
>>> try:
... br.select_form(nr=0)
... except mechanize.ParseError:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/test/test_html.py new/mechanize-0.4.5/test/test_html.py
--- old/mechanize-0.4.3/test/test_html.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/test/test_html.py 2019-12-22 04:27:48.000000000 +0100
@@ -13,16 +13,16 @@
def test_close_base_tag(self):
# any document containing a </base> tag used to cause an exception
br = mechanize.Browser()
- response = test_html_response("</base>")
- br.set_response(response)
+ br.set_html("</base>")
list(br.links())
def test_bad_base_tag(self):
# a document with a base tag with no href used to cause an exception
br = mechanize.Browser()
- response = test_html_response(
- "<BASE TARGET='_main'><a href='http://example.com/'>eg</a>")
- br.set_response(response)
+ br.set_html(
+ "<BASE TARGET='_main'><a href='http://example.com/'>eg</a>",
+ url="http://example.com/",
+ )
list(br.links())
@@ -71,7 +71,7 @@
def get_first_link_text(html):
factory = Factory()
- response = test_html_response(html)
+ response = test_html_response(html, url="http://example.com/")
factory.set_response(response)
return list(factory.links())[0].text
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/test/test_http.py new/mechanize-0.4.5/test/test_http.py
--- old/mechanize-0.4.3/test/test_http.py 2019-01-10 11:34:12.000000000 +0100
+++ new/mechanize-0.4.5/test/test_http.py 2019-09-02 11:34:38.000000000 +0200
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
-# License: GPLv3 Copyright: 2019, Kovid Goyal <kovid at kovidgoyal.net>
+# Copyright: 2019, Kovid Goyal <kovid at kovidgoyal.net>
from unittest import TestCase
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mechanize-0.4.3/test/test_urllib2.py new/mechanize-0.4.5/test/test_urllib2.py
--- old/mechanize-0.4.3/test/test_urllib2.py 2019-02-14 09:39:32.000000000 +0100
+++ new/mechanize-0.4.5/test/test_urllib2.py 2019-12-22 04:27:48.000000000 +0100
@@ -1,3 +1,4 @@
+# vim:fileencoding=utf-8
"""Tests for urllib2-level functionality.
This is urllib2's tests (most of which came from mechanize originally), plus
@@ -23,7 +24,7 @@
HTTPCookieProcessor, HTTPRefererProcessor, \
HTTPErrorProcessor, HTTPHandler
from mechanize import OpenerDirector, build_opener, Request
-from mechanize._urllib2_fork import AbstractHTTPHandler
+from mechanize._urllib2_fork import AbstractHTTPHandler, normalize_url
from mechanize._util import write_file
import mechanize._response
@@ -1364,7 +1365,7 @@
rp._sleep = st.sleep
rp.http_response(
Request("http://example.com"),
- test_response(headers=[("Refresh", header)]), )
+ test_response(headers=[("Refresh", header)], url="http://example.com/"), )
self.assertEqual(expect_refresh, opener.called)
st.verify()
@@ -1866,14 +1867,22 @@
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
- self.assertEqual("http://www.python.org/~jeremy/",
+ self.assertEqual("http://www.python.org/%7Ejeremy/",
self.get.get_full_url())
def test_selector(self):
- self.assertEqual("/~jeremy/", self.get.get_selector())
+ self.assertEqual("/%7Ejeremy/", self.get.get_selector())
req = Request("http://www.python.org/")
self.assertEqual("/", req.get_selector())
+ def test_normalize_url(self):
+ def t(x, expected=None):
+ self.assertEqual(normalize_url(x), expected or x)
+
+ t('https://simple.com/moo%7Ese')
+ t('https://ex.com/Spört', 'https://ex.com/Sp%C3%B6rt')
+ t('https://ex.com/Sp%C3%B6rt')
+
def test_get_type(self):
self.assertEqual("http", self.get.get_type())