Xqt triaged this task as "High" priority.
Xqt added a comment.
Changing _requests._http_request() to
kwargs = {}
schemes = ('http', 'https')
if self.json_warning and self.site.protocol() in schemes:
# retry with other scheme
kwargs['protocol'] = schemes[self.site.protocol() == 'http']
try:
response = http.request(self.site, uri=uri,
method='GET' if use_get else 'POST',
data=data, headers=headers, **kwargs)
except Exception:
raise
gives a full traceback:
D:\pwb\GIT\core>pwb.py cosmetic_changes -lang:de -page:user:xqt/Test
-simulate
Retrieving 1 pages from wikipedia:de.
>>> Benutzer:Xqt/Test <<<
1 read operation
Execution time: 6 seconds
Read operation time: 6.0 seconds
Script terminated by exception:
ERROR: HTTPConnectionPool(host='wikispot.org', port=80): Max retries
exceeded with url: /w/api.php (Caused by
ConnectTimeoutError(<urllib3.connection.HTTPConnection object at
0x000001D2F3145D90>, 'Connection to wikispot.org timed out. (connect
timeout=6.05)')) (ConnectTimeout)
Traceback (most recent call last):
File "C:\Python311\Lib\site-packages\urllib3\connection.py", line 174, in
_new_conn
conn = connection.create_connection(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\urllib3\util\connection.py", line
95, in create_connection
raise err
File "C:\Python311\Lib\site-packages\urllib3\util\connection.py", line
85, in create_connection
sock.connect(sa)
TimeoutError: timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Python311\Lib\site-packages\urllib3\connectionpool.py", line
703, in urlopen
httplib_response = self._make_request(
^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\urllib3\connectionpool.py", line
398, in _make_request
conn.request(method, url, **httplib_request_kw)
File "C:\Python311\Lib\site-packages\urllib3\connection.py", line 244, in
request
super(HTTPConnection, self).request(method, url, body=body,
headers=headers)
File "C:\Python311\Lib\http\client.py", line 1282, in request
self._send_request(method, url, body, headers, encode_chunked)
File "C:\Python311\Lib\http\client.py", line 1328, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "C:\Python311\Lib\http\client.py", line 1277, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "C:\Python311\Lib\http\client.py", line 1037, in _send_output
self.send(msg)
File "C:\Python311\Lib\http\client.py", line 975, in send
self.connect()
File "C:\Python311\Lib\site-packages\urllib3\connection.py", line 205, in
connect
conn = self._new_conn()
^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\urllib3\connection.py", line 179, in
_new_conn
raise ConnectTimeoutError(
urllib3.exceptions.ConnectTimeoutError: (<urllib3.connection.HTTPConnection
object at 0x000001D2F3145D90>, 'Connection to wikispot.org timed out. (connect
timeout=6.05)')
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Python311\Lib\site-packages\requests\adapters.py", line 440, in
send
resp = conn.urlopen(
^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\urllib3\connectionpool.py", line
787, in urlopen
retries = retries.increment(
^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\urllib3\util\retry.py", line 592, in
increment
raise MaxRetryError(_pool, url, error or ResponseError(cause))
urllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='wikispot.org',
port=80): Max retries exceeded with url: /w/api.php (Caused by
ConnectTimeoutError(<urllib3.connection.HTTPConnection object at
0x000001D2F3145D90>, 'Connection to wikispot.org timed out. (connect
timeout=6.05)'))
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\pwb\GIT\core\pwb.py", line 39, in <module>
sys.exit(main())
^^^^^^
File "D:\pwb\GIT\core\pwb.py", line 35, in main
runpy.run_path(str(path), run_name='__main__')
File "<frozen runpy>", line 291, in run_path
File "<frozen runpy>", line 98, in _run_module_code
File "<frozen runpy>", line 88, in _run_code
File "D:\pwb\GIT\core\pywikibot\scripts\wrapper.py", line 514, in <module>
main()
File "D:\pwb\GIT\core\pywikibot\scripts\wrapper.py", line 498, in main
if not execute():
^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\scripts\wrapper.py", line 485, in execute
run_python_file(filename, script_args, module)
File "D:\pwb\GIT\core\pywikibot\scripts\wrapper.py", line 147, in
run_python_file
exec(compile(source, filename, 'exec', dont_inherit=True),
File "D:\pwb\GIT\core\scripts\cosmetic_changes.py", line 131, in <module>
main()
File "D:\pwb\GIT\core\scripts\cosmetic_changes.py", line 127, in main
bot.run()
File "D:\pwb\GIT\core\pywikibot\bot.py", line 1708, in run
self.treat(page)
File "D:\pwb\GIT\core\pywikibot\bot.py", line 1962, in treat
self.treat_page()
File "D:\pwb\GIT\core\scripts\cosmetic_changes.py", line 84, in treat_page
new_text = cc_toolkit.change(old_text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\cosmetic_changes.py", line 302, in change
new_text = self._change(text)
^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\cosmetic_changes.py", line 296, in _change
text = self.safe_execute(method, text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\cosmetic_changes.py", line 283, in
safe_execute
result = method(text)
^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\cosmetic_changes.py", line 645, in
cleanUpLinks
text = textlib.replaceExcept(text, linkR, handleOneLink,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\textlib.py", line 433, in replaceExcept
replacement = new(match)
^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\cosmetic_changes.py", line 527, in
handleOneLink
is_interwiki = self.site.isInterwikiLink(titleWithSection)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_basesite.py", line 344, in
isInterwikiLink
linkfam, linkcode = pywikibot.Link(text, self).parse_site()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\page\_links.py", line 365, in parse_site
newsite = self._source.interwiki(prefix)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 150, in interwiki
return self._interwikimap[prefix].site
~~~~~~~~~~~~~~~~~~^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_interwikimap.py", line 78, in
__getitem__
raise self._iw_sites[prefix].site
File "D:\pwb\GIT\core\pywikibot\site\_interwikimap.py", line 25, in site
self._site = pywikibot.Site(
^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\__init__.py", line 1339, in Site
_sites[key] = interface(code=code, fam=fam, user=user)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 127, in __init__
self.login(cookie_only=True)
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 384, in login
if self.userinfo['name'] == self.user():
^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 543, in userinfo
uidata = uirequest.submit()
^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\data\api\_requests.py", line 976, in
submit
self._add_defaults()
File "D:\pwb\GIT\core\pywikibot\data\api\_requests.py", line 442, in
_add_defaults
and self.site.mw_version >= '1.25wmf5'):
^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 1160, in
mw_version
mw_ver = MediaWikiVersion(self.version())
^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_apisite.py", line 1133, in version
version = self.siteinfo.get('generator', expiry=1).split(' ')[1]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_siteinfo.py", line 304, in get
preloaded = self._get_general(key, expiry)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_siteinfo.py", line 242, in
_get_general
default_info = self._get_siteinfo(props, expiry)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\site\_siteinfo.py", line 167, in
_get_siteinfo
data = request.submit()
^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\data\api\_requests.py", line 1284, in
submit
self._data = super().submit()
^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\data\api\_requests.py", line 994, in
submit
response, use_get = self._http_request(use_get, uri, body, headers,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\data\api\_requests.py", line 691, in
_http_request
response = http.request(self.site, uri=uri,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\comms\http.py", line 285, in request
r = fetch(baseuri, headers=headers, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pwb\GIT\core\pywikibot\comms\http.py", line 451, in fetch
response = session.request(method, uri,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\requests\sessions.py", line 529, in
request
resp = self.send(prep, **send_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\requests\sessions.py", line 645, in
send
r = adapter.send(request, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Python311\Lib\site-packages\requests\adapters.py", line 507, in
send
raise ConnectTimeout(e, request=request)
requests.exceptions.ConnectTimeout: HTTPConnectionPool(host='wikispot.org',
port=80): Max retries exceeded with url: /w/api.php (Caused by
ConnectTimeoutError(<urllib3.connection.HTTPConnection object at
0x000001D2F3145D90>, 'Connection to wikispot.org timed out. (connect
timeout=6.05)'))
CRITICAL: Exiting due to uncaught exception ConnectTimeout:
HTTPConnectionPool(host='wikispot.org', port=80): Max retries exceeded with
url: /w/api.php (Caused by
ConnectTimeoutError(<urllib3.connection.HTTPConnection object at
0x000001D2F3145D90>, 'Connection to wikispot.org timed out. (connect
timeout=6.05)'))
D:\pwb\GIT\core>
TASK DETAIL
https://phabricator.wikimedia.org/T337045
EMAIL PREFERENCES
https://phabricator.wikimedia.org/settings/panel/emailpreferences/
To: Xqt
Cc: Xqt, Aklapper, alaa, pywikibot-bugs-list, Meno25, PotsdamLamb, Jyoo1011,
JohnsonLee01, SHEKH, Dijkstra, Khutuck, Zkhalido, Viztor, Wenyi, Tbscho, MayS,
Mdupont, JJMC89, Dvorapa, Altostratus, Avicennasis, mys_721tx, jayvdb, Masti,
Alchimista
_______________________________________________
pywikibot-bugs mailing list -- [email protected]
To unsubscribe send an email to [email protected]