Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-httpcore for openSUSE:Factory
checked in at 2023-05-19 11:55:01
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-httpcore (Old)
and /work/SRC/openSUSE:Factory/.python-httpcore.new.1533 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-httpcore"
Fri May 19 11:55:01 2023 rev:10 rq:1084216 version:0.17.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-httpcore/python-httpcore.changes
2023-04-22 21:58:14.060417909 +0200
+++
/work/SRC/openSUSE:Factory/.python-httpcore.new.1533/python-httpcore.changes
2023-05-19 11:55:19.667196683 +0200
@@ -1,0 +2,10 @@
+Wed May 3 08:46:32 UTC 2023 - Dirk Müller <[email protected]>
+
+- update to 0.17.0:
+ * Add DEBUG level logging. (#648)
+ * Respect HTTP/2 max concurrent streams when settings updates
+ are sent by server. (#652)
+ * Increase the allowable HTTP header size to 100kB. (#647)
+ * Add `retries` option to SOCKS proxy classes. (#643)
+
+-------------------------------------------------------------------
Old:
----
httpcore-0.16.3.tar.gz
New:
----
httpcore-0.17.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-httpcore.spec ++++++
--- /var/tmp/diff_new_pack.DVMJcQ/_old 2023-05-19 11:55:20.315200392 +0200
+++ /var/tmp/diff_new_pack.DVMJcQ/_new 2023-05-19 11:55:20.319200415 +0200
@@ -27,7 +27,7 @@
%{?sle15_python_module_pythons}
Name: python-httpcore%{psuffix}
-Version: 0.16.3
+Version: 0.17.0
Release: 0
Summary: Minimal low-level Python HTTP client
License: BSD-3-Clause
++++++ httpcore-0.16.3.tar.gz -> httpcore-0.17.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/CHANGELOG.md
new/httpcore-0.17.0/CHANGELOG.md
--- old/httpcore-0.16.3/CHANGELOG.md 2022-12-20 13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/CHANGELOG.md 2023-03-16 14:41:19.000000000 +0100
@@ -4,6 +4,13 @@
The format is based on [Keep a
Changelog](https://keepachangelog.com/en/1.0.0/).
+## 0.17.0 (March 16th, 2023)
+
+- Add DEBUG level logging. (#648)
+- Respect HTTP/2 max concurrent streams when settings updates are sent by
server. (#652)
+- Increase the allowable HTTP header size to 100kB. (#647)
+- Add `retries` option to SOCKS proxy classes. (#643)
+
## 0.16.3 (December 20th, 2022)
- Allow `ws` and `wss` schemes. Allows us to properly support websocket
upgrade connections. (#625)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/docs/logging.md
new/httpcore-0.17.0/docs/logging.md
--- old/httpcore-0.16.3/docs/logging.md 1970-01-01 01:00:00.000000000 +0100
+++ new/httpcore-0.17.0/docs/logging.md 2023-03-16 14:41:19.000000000 +0100
@@ -0,0 +1,41 @@
+# Logging
+
+If you need to inspect the internal behaviour of `httpcore`, you can use
Python's standard logging to output debug level information.
+
+For example, the following configuration...
+
+```python
+import logging
+import httpcore
+
+logging.basicConfig(
+ format="%(levelname)s [%(asctime)s] %(name)s - %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+ level=logging.DEBUG
+)
+
+httpcore.request('GET', 'https://www.example.com')
+```
+
+Will send debug level output to the console, or wherever `stdout` is directed
to...
+
+```
+DEBUG [2023-01-09 14:44:00] httpcore - connection.connect_tcp.started
host='www.example.com' port=443 local_address=None timeout=None
+DEBUG [2023-01-09 14:44:00] httpcore - connection.connect_tcp.complete
return_value=<httpcore.backends.sync.SyncStream object at 0x109ba6610>
+DEBUG [2023-01-09 14:44:00] httpcore - connection.start_tls.started
ssl_context=<ssl.SSLContext object at 0x109e427b0>
server_hostname='www.example.com' timeout=None
+DEBUG [2023-01-09 14:44:00] httpcore - connection.start_tls.complete
return_value=<httpcore.backends.sync.SyncStream object at 0x109e8b050>
+DEBUG [2023-01-09 14:44:00] httpcore - http11.send_request_headers.started
request=<Request [b'GET']>
+DEBUG [2023-01-09 14:44:00] httpcore - http11.send_request_headers.complete
+DEBUG [2023-01-09 14:44:00] httpcore - http11.send_request_body.started
request=<Request [b'GET']>
+DEBUG [2023-01-09 14:44:00] httpcore - http11.send_request_body.complete
+DEBUG [2023-01-09 14:44:00] httpcore - http11.receive_response_headers.started
request=<Request [b'GET']>
+DEBUG [2023-01-09 14:44:00] httpcore -
http11.receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK',
[(b'Age', b'572646'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type',
b'text/html; charset=UTF-8'), (b'Date', b'Mon, 09 Jan 2023 14:44:00 GMT'),
(b'Etag', b'"3147526947+ident"'), (b'Expires', b'Mon, 16 Jan 2023 14:44:00
GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS
(nyb/1D18)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'),
(b'Content-Length', b'1256')])
+DEBUG [2023-01-09 14:44:00] httpcore - http11.receive_response_body.started
request=<Request [b'GET']>
+DEBUG [2023-01-09 14:44:00] httpcore - http11.receive_response_body.complete
+DEBUG [2023-01-09 14:44:00] httpcore - http11.response_closed.started
+DEBUG [2023-01-09 14:44:00] httpcore - http11.response_closed.complete
+DEBUG [2023-01-09 14:44:00] httpcore - connection.close.started
+DEBUG [2023-01-09 14:44:00] httpcore - connection.close.complete
+```
+
+The exact formatting of the debug logging may be subject to change across
different versions of `httpcore`. If you need to rely on a particular format it
is recommended that you pin installation of the package to a fixed version.
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/__init__.py
new/httpcore-0.17.0/httpcore/__init__.py
--- old/httpcore-0.16.3/httpcore/__init__.py 2022-12-20 13:07:43.000000000
+0100
+++ new/httpcore-0.17.0/httpcore/__init__.py 2023-03-16 14:41:19.000000000
+0100
@@ -82,7 +82,7 @@
"WriteError",
]
-__version__ = "0.16.3"
+__version__ = "0.17.0"
__locals = locals()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_async/connection.py
new/httpcore-0.17.0/httpcore/_async/connection.py
--- old/httpcore-0.16.3/httpcore/_async/connection.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_async/connection.py 2023-03-16
14:41:19.000000000 +0100
@@ -156,7 +156,8 @@
async def aclose(self) -> None:
if self._connection is not None:
- await self._connection.aclose()
+ async with Trace("connection.close", None, {}):
+ await self._connection.aclose()
def is_available(self) -> bool:
if self._connection is None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_async/http11.py
new/httpcore-0.17.0/httpcore/_async/http11.py
--- old/httpcore-0.16.3/httpcore/_async/http11.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_async/http11.py 2023-03-16
14:41:19.000000000 +0100
@@ -43,6 +43,7 @@
class AsyncHTTP11Connection(AsyncConnectionInterface):
READ_NUM_BYTES = 64 * 1024
+ MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
def __init__(
self,
@@ -57,7 +58,10 @@
self._state = HTTPConnectionState.NEW
self._state_lock = AsyncLock()
self._request_count = 0
- self._h11_state = h11.Connection(our_role=h11.CLIENT)
+ self._h11_state = h11.Connection(
+ our_role=h11.CLIENT,
+ max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+ )
async def handle_async_request(self, request: Request) -> Response:
if not self.can_handle_request(request.url.origin):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_async/http2.py
new/httpcore-0.17.0/httpcore/_async/http2.py
--- old/httpcore-0.16.3/httpcore/_async/http2.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_async/http2.py 2023-03-16
14:41:19.000000000 +0100
@@ -88,8 +88,18 @@
async with Trace("http2.send_connection_init", request,
kwargs):
await self._send_connection_init(**kwargs)
self._sent_connection_init = True
- max_streams =
self._h2_state.local_settings.max_concurrent_streams
- self._max_streams_semaphore = AsyncSemaphore(max_streams)
+
+ # Initially start with just 1 until the remote server provides
+ # its max_concurrent_streams value
+ self._max_streams = 1
+
+ local_settings_max_streams = (
+ self._h2_state.local_settings.max_concurrent_streams
+ )
+ self._max_streams_semaphore =
AsyncSemaphore(local_settings_max_streams)
+
+ for _ in range(local_settings_max_streams - self._max_streams):
+ await self._max_streams_semaphore.acquire()
await self._max_streams_semaphore.acquire()
@@ -280,6 +290,13 @@
if stream_id is None or not self._events.get(stream_id):
events = await self._read_incoming_data(request)
for event in events:
+ if isinstance(event, h2.events.RemoteSettingsChanged):
+ async with Trace(
+ "http2.receive_remote_settings", request
+ ) as trace:
+ await self._receive_remote_settings_change(event)
+ trace.return_value = event
+
event_stream_id = getattr(event, "stream_id", 0)
# The ConnectionTerminatedEvent applies to the entire
connection,
@@ -293,6 +310,23 @@
await self._write_outgoing_data(request)
+ async def _receive_remote_settings_change(self, event: h2.events.Event) ->
None:
+ max_concurrent_streams = event.changed_settings.get(
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
+ )
+ if max_concurrent_streams:
+ new_max_streams = min(
+ max_concurrent_streams.new_value,
+ self._h2_state.local_settings.max_concurrent_streams,
+ )
+ if new_max_streams and new_max_streams != self._max_streams:
+ while new_max_streams > self._max_streams:
+ await self._max_streams_semaphore.release()
+ self._max_streams += 1
+ while new_max_streams < self._max_streams:
+ await self._max_streams_semaphore.acquire()
+ self._max_streams -= 1
+
async def _response_closed(self, stream_id: int) -> None:
await self._max_streams_semaphore.release()
del self._events[stream_id]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_async/socks_proxy.py
new/httpcore-0.17.0/httpcore/_async/socks_proxy.py
--- old/httpcore-0.16.3/httpcore/_async/socks_proxy.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_async/socks_proxy.py 2023-03-16
14:41:19.000000000 +0100
@@ -114,6 +114,7 @@
keepalive_expiry: typing.Optional[float] = None,
http1: bool = True,
http2: bool = False,
+ retries: int = 0,
network_backend: typing.Optional[AsyncNetworkBackend] = None,
) -> None:
"""
@@ -154,6 +155,7 @@
http1=http1,
http2=http2,
network_backend=network_backend,
+ retries=retries,
)
self._ssl_context = ssl_context
self._proxy_url = enforce_url(proxy_url, name="proxy_url")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_sync/connection.py
new/httpcore-0.17.0/httpcore/_sync/connection.py
--- old/httpcore-0.16.3/httpcore/_sync/connection.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_sync/connection.py 2023-03-16
14:41:19.000000000 +0100
@@ -156,7 +156,8 @@
def close(self) -> None:
if self._connection is not None:
- self._connection.close()
+ with Trace("connection.close", None, {}):
+ self._connection.close()
def is_available(self) -> bool:
if self._connection is None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_sync/http11.py
new/httpcore-0.17.0/httpcore/_sync/http11.py
--- old/httpcore-0.16.3/httpcore/_sync/http11.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_sync/http11.py 2023-03-16
14:41:19.000000000 +0100
@@ -43,6 +43,7 @@
class HTTP11Connection(ConnectionInterface):
READ_NUM_BYTES = 64 * 1024
+ MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
def __init__(
self,
@@ -57,7 +58,10 @@
self._state = HTTPConnectionState.NEW
self._state_lock = Lock()
self._request_count = 0
- self._h11_state = h11.Connection(our_role=h11.CLIENT)
+ self._h11_state = h11.Connection(
+ our_role=h11.CLIENT,
+ max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+ )
def handle_request(self, request: Request) -> Response:
if not self.can_handle_request(request.url.origin):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_sync/http2.py
new/httpcore-0.17.0/httpcore/_sync/http2.py
--- old/httpcore-0.16.3/httpcore/_sync/http2.py 2022-12-20 13:07:43.000000000
+0100
+++ new/httpcore-0.17.0/httpcore/_sync/http2.py 2023-03-16 14:41:19.000000000
+0100
@@ -88,8 +88,18 @@
with Trace("http2.send_connection_init", request, kwargs):
self._send_connection_init(**kwargs)
self._sent_connection_init = True
- max_streams =
self._h2_state.local_settings.max_concurrent_streams
- self._max_streams_semaphore = Semaphore(max_streams)
+
+ # Initially start with just 1 until the remote server provides
+ # its max_concurrent_streams value
+ self._max_streams = 1
+
+ local_settings_max_streams = (
+ self._h2_state.local_settings.max_concurrent_streams
+ )
+ self._max_streams_semaphore =
Semaphore(local_settings_max_streams)
+
+ for _ in range(local_settings_max_streams - self._max_streams):
+ self._max_streams_semaphore.acquire()
self._max_streams_semaphore.acquire()
@@ -280,6 +290,13 @@
if stream_id is None or not self._events.get(stream_id):
events = self._read_incoming_data(request)
for event in events:
+ if isinstance(event, h2.events.RemoteSettingsChanged):
+ with Trace(
+ "http2.receive_remote_settings", request
+ ) as trace:
+ self._receive_remote_settings_change(event)
+ trace.return_value = event
+
event_stream_id = getattr(event, "stream_id", 0)
# The ConnectionTerminatedEvent applies to the entire
connection,
@@ -293,6 +310,23 @@
self._write_outgoing_data(request)
+ def _receive_remote_settings_change(self, event: h2.events.Event) -> None:
+ max_concurrent_streams = event.changed_settings.get(
+ h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS
+ )
+ if max_concurrent_streams:
+ new_max_streams = min(
+ max_concurrent_streams.new_value,
+ self._h2_state.local_settings.max_concurrent_streams,
+ )
+ if new_max_streams and new_max_streams != self._max_streams:
+ while new_max_streams > self._max_streams:
+ self._max_streams_semaphore.release()
+ self._max_streams += 1
+ while new_max_streams < self._max_streams:
+ self._max_streams_semaphore.acquire()
+ self._max_streams -= 1
+
def _response_closed(self, stream_id: int) -> None:
self._max_streams_semaphore.release()
del self._events[stream_id]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_sync/socks_proxy.py
new/httpcore-0.17.0/httpcore/_sync/socks_proxy.py
--- old/httpcore-0.16.3/httpcore/_sync/socks_proxy.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/_sync/socks_proxy.py 2023-03-16
14:41:19.000000000 +0100
@@ -114,6 +114,7 @@
keepalive_expiry: typing.Optional[float] = None,
http1: bool = True,
http2: bool = False,
+ retries: int = 0,
network_backend: typing.Optional[NetworkBackend] = None,
) -> None:
"""
@@ -154,6 +155,7 @@
http1=http1,
http2=http2,
network_backend=network_backend,
+ retries=retries,
)
self._ssl_context = ssl_context
self._proxy_url = enforce_url(proxy_url, name="proxy_url")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/_trace.py
new/httpcore-0.17.0/httpcore/_trace.py
--- old/httpcore-0.16.3/httpcore/_trace.py 2022-12-20 13:07:43.000000000
+0100
+++ new/httpcore-0.17.0/httpcore/_trace.py 2023-03-16 14:41:19.000000000
+0100
@@ -1,20 +1,42 @@
+import logging
from types import TracebackType
from typing import Any, Dict, Optional, Type
from ._models import Request
+logger = logging.getLogger("httpcore")
+
class Trace:
def __init__(
- self, name: str, request: Request, kwargs: Optional[Dict[str, Any]] =
None
+ self,
+ name: str,
+ request: Optional[Request] = None,
+ kwargs: Optional[Dict[str, Any]] = None,
) -> None:
self.name = name
- self.trace = request.extensions.get("trace")
+ self.trace_extension = (
+ None if request is None else request.extensions.get("trace")
+ )
+ self.debug = logger.isEnabledFor(logging.DEBUG)
self.kwargs = kwargs or {}
self.return_value: Any = None
+ self.should_trace = self.debug or self.trace_extension is not None
+
+ def trace(self, name: str, info: Dict[str, Any]) -> None:
+ if self.trace_extension is not None:
+ self.trace_extension(name, info)
+
+ if self.debug:
+ if not info or "return_value" in info and info["return_value"] is
None:
+ message = name
+ else:
+ args = " ".join([f"{key}={value!r}" for key, value in
info.items()])
+ message = f"{name} {args}"
+ logger.debug(message)
def __enter__(self) -> "Trace":
- if self.trace is not None:
+ if self.should_trace:
info = self.kwargs
self.trace(f"{self.name}.started", info)
return self
@@ -25,7 +47,7 @@
exc_value: Optional[BaseException] = None,
traceback: Optional[TracebackType] = None,
) -> None:
- if self.trace is not None:
+ if self.should_trace:
if exc_value is None:
info = {"return_value": self.return_value}
self.trace(f"{self.name}.complete", info)
@@ -33,10 +55,22 @@
info = {"exception": exc_value}
self.trace(f"{self.name}.failed", info)
+ async def atrace(self, name: str, info: Dict[str, Any]) -> None:
+ if self.trace_extension is not None:
+ await self.trace_extension(name, info)
+
+ if self.debug:
+ if not info or "return_value" in info and info["return_value"] is
None:
+ message = name
+ else:
+ args = " ".join([f"{key}={value!r}" for key, value in
info.items()])
+ message = f"{name} {args}"
+ logger.debug(message)
+
async def __aenter__(self) -> "Trace":
- if self.trace is not None:
+ if self.should_trace:
info = self.kwargs
- await self.trace(f"{self.name}.started", info)
+ await self.atrace(f"{self.name}.started", info)
return self
async def __aexit__(
@@ -45,10 +79,10 @@
exc_value: Optional[BaseException] = None,
traceback: Optional[TracebackType] = None,
) -> None:
- if self.trace is not None:
+ if self.should_trace:
if exc_value is None:
info = {"return_value": self.return_value}
- await self.trace(f"{self.name}.complete", info)
+ await self.atrace(f"{self.name}.complete", info)
else:
info = {"exception": exc_value}
- await self.trace(f"{self.name}.failed", info)
+ await self.atrace(f"{self.name}.failed", info)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/httpcore/backends/mock.py
new/httpcore-0.17.0/httpcore/backends/mock.py
--- old/httpcore-0.16.3/httpcore/backends/mock.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/httpcore/backends/mock.py 2023-03-16
14:41:19.000000000 +0100
@@ -44,6 +44,9 @@
def get_extra_info(self, info: str) -> typing.Any:
return MockSSLObject(http2=self._http2) if info == "ssl_object" else
None
+ def __repr__(self) -> str:
+ return "<httpcore.MockStream>"
+
class MockBackend(NetworkBackend):
def __init__(self, buffer: typing.List[bytes], http2: bool = False) ->
None:
@@ -98,6 +101,9 @@
def get_extra_info(self, info: str) -> typing.Any:
return MockSSLObject(http2=self._http2) if info == "ssl_object" else
None
+ def __repr__(self) -> str:
+ return "<httpcore.AsyncMockStream>"
+
class AsyncMockBackend(AsyncNetworkBackend):
def __init__(self, buffer: typing.List[bytes], http2: bool = False) ->
None:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/mkdocs.yml
new/httpcore-0.17.0/mkdocs.yml
--- old/httpcore-0.16.3/mkdocs.yml 2022-12-20 13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/mkdocs.yml 2023-03-16 14:41:19.000000000 +0100
@@ -13,6 +13,7 @@
- HTTP/2: 'http2.md'
- Async Support: 'async.md'
- Extensions: 'extensions.md'
+ - Logging: 'logging.md'
- Exceptions: 'exceptions.md'
theme:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/requirements.txt
new/httpcore-0.17.0/requirements.txt
--- old/httpcore-0.16.3/requirements.txt 2022-12-20 13:07:43.000000000
+0100
+++ new/httpcore-0.17.0/requirements.txt 2023-03-16 14:41:19.000000000
+0100
@@ -7,8 +7,8 @@
mkdocs==1.4.2
mkdocs-autorefs==0.3.1
mkdocs-material==8.5.7
-mkdocs-material-extensions==1.1
-mkdocstrings[python-legacy]==0.19.0
+mkdocs-material-extensions==1.1.1
+mkdocstrings[python-legacy]==0.19.1
jinja2==3.1.2
# Packaging
@@ -18,10 +18,10 @@
# Tests & Linting
anyio==3.6.2
autoflake==1.7.7
-black==22.8.0
+black==23.1.0
coverage==6.5.0
flake8==3.9.2 # See: https://github.com/PyCQA/flake8/pull/1438
-isort==5.10.1
+isort==5.11.4
importlib-metadata==4.13.0
mypy==0.991
trio-typing==0.7.0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_async/test_connection_pool.py
new/httpcore-0.17.0/tests/_async/test_connection_pool.py
--- old/httpcore-0.16.3/tests/_async/test_connection_pool.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_async/test_connection_pool.py 2023-03-16
14:41:19.000000000 +0100
@@ -1,3 +1,4 @@
+import logging
from typing import List, Optional
import pytest
@@ -160,6 +161,74 @@
]
[email protected]
+async def test_debug_request(caplog):
+ """
+    Enabling DEBUG level logging should emit a log record for each internal
+    event that occurs while sending a request.
+ """
+ caplog.set_level(logging.DEBUG)
+
+ network_backend = AsyncMockBackend(
+ [
+ b"HTTP/1.1 200 OK\r\n",
+ b"Content-Type: plain/text\r\n",
+ b"Content-Length: 13\r\n",
+ b"\r\n",
+ b"Hello, world!",
+ ]
+ )
+
+ async with AsyncConnectionPool(network_backend=network_backend) as pool:
+ await pool.request("GET", "http://example.com/")
+
+ assert caplog.record_tuples == [
+ (
+ "httpcore",
+ logging.DEBUG,
+ "connection.connect_tcp.started host='example.com' port=80
local_address=None timeout=None",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "connection.connect_tcp.complete
return_value=<httpcore.AsyncMockStream>",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.send_request_headers.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.send_request_headers.complete"),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.send_request_body.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.send_request_body.complete"),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_headers.started request=<Request
[b'GET']>",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_headers.complete return_value="
+ "(b'HTTP/1.1', 200, b'OK', [(b'Content-Type', b'plain/text'),
(b'Content-Length', b'13')])",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_body.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.receive_response_body.complete"),
+ ("httpcore", logging.DEBUG, "http11.response_closed.started"),
+ ("httpcore", logging.DEBUG, "http11.response_closed.complete"),
+ ("httpcore", logging.DEBUG, "connection.close.started"),
+ ("httpcore", logging.DEBUG, "connection.close.complete"),
+ ]
+
+
@pytest.mark.anyio
async def test_connection_pool_with_http_exception():
"""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_async/test_http11.py
new/httpcore-0.17.0/tests/_async/test_http11.py
--- old/httpcore-0.16.3/tests/_async/test_http11.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_async/test_http11.py 2023-03-16
14:41:19.000000000 +0100
@@ -310,3 +310,27 @@
)
assert response.status == 200
assert response.content == b"<html>Hello, world! ...</html>"
+
+
[email protected]
+async def test_http11_header_sub_100kb():
+ """
+ A connection should be able to handle a http header size up to 100kB.
+ """
+ origin = Origin(b"https", b"example.com", 443)
+ stream = AsyncMockStream(
+ [
+ b"HTTP/1.1 200 OK\r\n", # 17
+ b"Content-Type: plain/text\r\n", # 43
+ b"Cookie: " + b"x" * (100 * 1024 - 72) + b"\r\n", # 102381
+ b"Content-Length: 0\r\n", # 102400
+ b"\r\n",
+ b"",
+ ]
+ )
+ async with AsyncHTTP11Connection(
+ origin=origin, stream=stream, keepalive_expiry=5.0
+ ) as conn:
+ response = await conn.request("GET", "https://example.com/")
+ assert response.status == 200
+ assert response.content == b""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_async/test_http2.py
new/httpcore-0.17.0/tests/_async/test_http2.py
--- old/httpcore-0.16.3/tests/_async/test_http2.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_async/test_http2.py 2023-03-16
14:41:19.000000000 +0100
@@ -294,3 +294,55 @@
async with AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
with pytest.raises(RuntimeError):
await conn.request("GET", "https://other.com/")
+
+
[email protected]
+async def test_http2_remote_max_streams_update():
+ """
+ If the remote server updates the maximum concurrent streams value, we
should
+ be adjusting how many streams we will allow.
+ """
+ origin = Origin(b"https", b"example.com", 443)
+ stream = AsyncMockStream(
+ [
+ hyperframe.frame.SettingsFrame(
+
settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1000}
+ ).serialize(),
+ hyperframe.frame.HeadersFrame(
+ stream_id=1,
+ data=hpack.Encoder().encode(
+ [
+ (b":status", b"200"),
+ (b"content-type", b"plain/text"),
+ ]
+ ),
+ flags=["END_HEADERS"],
+ ).serialize(),
+ hyperframe.frame.DataFrame(stream_id=1, data=b"Hello,
world!").serialize(),
+ hyperframe.frame.SettingsFrame(
+
settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 50}
+ ).serialize(),
+ hyperframe.frame.DataFrame(
+ stream_id=1, data=b"Hello, world...again!",
flags=["END_STREAM"]
+ ).serialize(),
+ ]
+ )
+ async with AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
+ async with conn.stream("GET", "https://example.com/") as response:
+ i = 0
+ async for chunk in response.aiter_stream():
+ if i == 0:
+ assert chunk == b"Hello, world!"
+ assert
conn._h2_state.remote_settings.max_concurrent_streams == 1000
+ assert conn._max_streams == min(
+ conn._h2_state.remote_settings.max_concurrent_streams,
+ conn._h2_state.local_settings.max_concurrent_streams,
+ )
+ elif i == 1:
+ assert chunk == b"Hello, world...again!"
+ assert
conn._h2_state.remote_settings.max_concurrent_streams == 50
+ assert conn._max_streams == min(
+ conn._h2_state.remote_settings.max_concurrent_streams,
+ conn._h2_state.local_settings.max_concurrent_streams,
+ )
+ i += 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_sync/test_connection_pool.py
new/httpcore-0.17.0/tests/_sync/test_connection_pool.py
--- old/httpcore-0.16.3/tests/_sync/test_connection_pool.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_sync/test_connection_pool.py 2023-03-16
14:41:19.000000000 +0100
@@ -1,3 +1,4 @@
+import logging
from typing import List, Optional
import pytest
@@ -160,6 +161,74 @@
]
+
+def test_debug_request(caplog):
+ """
+    Enabling DEBUG level logging should emit a log record for each internal
+    event that occurs while sending a request.
+ """
+ caplog.set_level(logging.DEBUG)
+
+ network_backend = MockBackend(
+ [
+ b"HTTP/1.1 200 OK\r\n",
+ b"Content-Type: plain/text\r\n",
+ b"Content-Length: 13\r\n",
+ b"\r\n",
+ b"Hello, world!",
+ ]
+ )
+
+ with ConnectionPool(network_backend=network_backend) as pool:
+ pool.request("GET", "http://example.com/")
+
+ assert caplog.record_tuples == [
+ (
+ "httpcore",
+ logging.DEBUG,
+ "connection.connect_tcp.started host='example.com' port=80
local_address=None timeout=None",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "connection.connect_tcp.complete
return_value=<httpcore.MockStream>",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.send_request_headers.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.send_request_headers.complete"),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.send_request_body.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.send_request_body.complete"),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_headers.started request=<Request
[b'GET']>",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_headers.complete return_value="
+ "(b'HTTP/1.1', 200, b'OK', [(b'Content-Type', b'plain/text'),
(b'Content-Length', b'13')])",
+ ),
+ (
+ "httpcore",
+ logging.DEBUG,
+ "http11.receive_response_body.started request=<Request [b'GET']>",
+ ),
+ ("httpcore", logging.DEBUG, "http11.receive_response_body.complete"),
+ ("httpcore", logging.DEBUG, "http11.response_closed.started"),
+ ("httpcore", logging.DEBUG, "http11.response_closed.complete"),
+ ("httpcore", logging.DEBUG, "connection.close.started"),
+ ("httpcore", logging.DEBUG, "connection.close.complete"),
+ ]
+
+
def test_connection_pool_with_http_exception():
"""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_sync/test_http11.py
new/httpcore-0.17.0/tests/_sync/test_http11.py
--- old/httpcore-0.16.3/tests/_sync/test_http11.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_sync/test_http11.py 2023-03-16
14:41:19.000000000 +0100
@@ -310,3 +310,27 @@
)
assert response.status == 200
assert response.content == b"<html>Hello, world! ...</html>"
+
+
+
+def test_http11_header_sub_100kb():
+ """
+ A connection should be able to handle a http header size up to 100kB.
+ """
+ origin = Origin(b"https", b"example.com", 443)
+ stream = MockStream(
+ [
+ b"HTTP/1.1 200 OK\r\n", # 17
+ b"Content-Type: plain/text\r\n", # 43
+ b"Cookie: " + b"x" * (100 * 1024 - 72) + b"\r\n", # 102381
+ b"Content-Length: 0\r\n", # 102400
+ b"\r\n",
+ b"",
+ ]
+ )
+ with HTTP11Connection(
+ origin=origin, stream=stream, keepalive_expiry=5.0
+ ) as conn:
+ response = conn.request("GET", "https://example.com/")
+ assert response.status == 200
+ assert response.content == b""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/tests/_sync/test_http2.py
new/httpcore-0.17.0/tests/_sync/test_http2.py
--- old/httpcore-0.16.3/tests/_sync/test_http2.py 2022-12-20
13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/tests/_sync/test_http2.py 2023-03-16
14:41:19.000000000 +0100
@@ -294,3 +294,55 @@
with HTTP2Connection(origin=origin, stream=stream) as conn:
with pytest.raises(RuntimeError):
conn.request("GET", "https://other.com/")
+
+
+
+def test_http2_remote_max_streams_update():
+ """
+ If the remote server updates the maximum concurrent streams value, we
should
+ be adjusting how many streams we will allow.
+ """
+ origin = Origin(b"https", b"example.com", 443)
+ stream = MockStream(
+ [
+ hyperframe.frame.SettingsFrame(
+
settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1000}
+ ).serialize(),
+ hyperframe.frame.HeadersFrame(
+ stream_id=1,
+ data=hpack.Encoder().encode(
+ [
+ (b":status", b"200"),
+ (b"content-type", b"plain/text"),
+ ]
+ ),
+ flags=["END_HEADERS"],
+ ).serialize(),
+ hyperframe.frame.DataFrame(stream_id=1, data=b"Hello,
world!").serialize(),
+ hyperframe.frame.SettingsFrame(
+
settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 50}
+ ).serialize(),
+ hyperframe.frame.DataFrame(
+ stream_id=1, data=b"Hello, world...again!",
flags=["END_STREAM"]
+ ).serialize(),
+ ]
+ )
+ with HTTP2Connection(origin=origin, stream=stream) as conn:
+ with conn.stream("GET", "https://example.com/") as response:
+ i = 0
+ for chunk in response.iter_stream():
+ if i == 0:
+ assert chunk == b"Hello, world!"
+ assert
conn._h2_state.remote_settings.max_concurrent_streams == 1000
+ assert conn._max_streams == min(
+ conn._h2_state.remote_settings.max_concurrent_streams,
+ conn._h2_state.local_settings.max_concurrent_streams,
+ )
+ elif i == 1:
+ assert chunk == b"Hello, world...again!"
+ assert
conn._h2_state.remote_settings.max_concurrent_streams == 50
+ assert conn._max_streams == min(
+ conn._h2_state.remote_settings.max_concurrent_streams,
+ conn._h2_state.local_settings.max_concurrent_streams,
+ )
+ i += 1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/httpcore-0.16.3/unasync.py
new/httpcore-0.17.0/unasync.py
--- old/httpcore-0.16.3/unasync.py 2022-12-20 13:07:43.000000000 +0100
+++ new/httpcore-0.17.0/unasync.py 2023-03-16 14:41:19.000000000 +0100
@@ -18,6 +18,7 @@
('aclose', 'close'),
('aclose_func', 'close_func'),
('aiterator', 'iterator'),
+ ('aiter_stream', 'iter_stream'),
('aread', 'read'),
('asynccontextmanager', 'contextmanager'),
('__aenter__', '__enter__'),