Package: release.debian.org
Severity: normal
User: release.debian....@packages.debian.org
Usertags: unblock
X-Debbugs-Cc: python-werkz...@packages.debian.org
Control: affects -1 + src:python-werkzeug

Please unblock package python-werkzeug

[ Reason ]
The latest upload adds patches fixing the CVEs in $subject.

[ Impact ]
Vulnerabilities described in #1031370: CVE-2023-23934 (cookie parsing strips
a leading "=") and CVE-2023-25577 (no limit on the number of multipart form
parts).
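
To illustrate the cookie issue, here is a minimal standalone sketch (not
part of the debdiff below; the cookie name and values are made up) of the
behaviour with the patched package installed, based on the test added in
0003-don-t-strip-leading-when-parsing-cookie.patch:

  from werkzeug.http import parse_cookie

  # With the fix, the "nameless" pair "=__Host-id=bad" is ignored instead
  # of being parsed as a __Host- cookie; unpatched 2.2.2 would keep "bad".
  cookies = parse_cookie("=__Host-id=bad; __Host-id=good")
  assert cookies.to_dict() == {"__Host-id": "good"}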

[ Tests ]
The package runs its unit tests at build time and again in autopkgtest.

[ Risks ]
The patches are taken straight from upstream, so they should be fine. I
haven't tested for regressions myself, but the unit tests should cover
them.
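
To give an idea of what the new limit does, here is a standalone sketch
(again not part of the debdiff; the part names and body are made up)
mirroring the test added in
0004-limit-the-maximum-number-of-multipart-form-parts.patch, assuming the
patched package is installed:

  import io

  from werkzeug.exceptions import RequestEntityTooLarge
  from werkzeug.wrappers import Request

  # A tiny multipart body with two parts.
  data = (
      b"--foo\r\nContent-Disposition: form-data; name=a\r\n\r\n1\r\n"
      b"--foo\r\nContent-Disposition: form-data; name=b\r\n\r\n2\r\n"
      b"--foo--"
  )
  req = Request.from_values(
      input_stream=io.BytesIO(data),
      content_length=len(data),
      content_type="multipart/form-data; boundary=foo",
      method="POST",
  )
  # max_form_parts is the attribute added by the patch (default 1000).
  req.max_form_parts = 1

  try:
      req.form["a"]
  except RequestEntityTooLarge:
      print("form parsing aborted after exceeding max_form_parts")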

[ Checklist ]
  [x] all changes are documented in the d/changelog
  [x] I reviewed all changes and I approve them
  [x] attach debdiff against the package in testing

unblock python-werkzeug/2.2.2-3
diff -Nru python-werkzeug-2.2.2/debian/changelog python-werkzeug-2.2.2/debian/changelog
--- python-werkzeug-2.2.2/debian/changelog      2022-11-30 13:52:03.000000000 +0100
+++ python-werkzeug-2.2.2/debian/changelog      2023-04-21 13:37:22.000000000 +0200
@@ -1,3 +1,11 @@
+python-werkzeug (2.2.2-3) unstable; urgency=medium
+
+  [ Robin Gustafsson ]
+  * Fix security vulnerabilities
+    (CVE-2023-23934, CVE-2023-25577, Closes: #1031370)
+
+ -- Thomas Goirand <z...@debian.org>  Fri, 21 Apr 2023 13:37:22 +0200
+
 python-werkzeug (2.2.2-2) unstable; urgency=medium
 
   * Uploading to unstable.
diff -Nru python-werkzeug-2.2.2/debian/patches/0003-don-t-strip-leading-when-parsing-cookie.patch python-werkzeug-2.2.2/debian/patches/0003-don-t-strip-leading-when-parsing-cookie.patch
--- python-werkzeug-2.2.2/debian/patches/0003-don-t-strip-leading-when-parsing-cookie.patch     1970-01-01 01:00:00.000000000 +0100
+++ python-werkzeug-2.2.2/debian/patches/0003-don-t-strip-leading-when-parsing-cookie.patch     2023-04-21 13:37:22.000000000 +0200
@@ -0,0 +1,83 @@
+Description: CVE-2023-23934: don't strip leading = when parsing cookie
+ Applied-Upstream: 2.2.3
+Author: David Lord <david...@gmail.com>
+Date: Tue, 31 Jan 2023 14:29:34 -0800
+Origin: upstream, https://github.com/pallets/werkzeug/commit/cf275f42acad1b5950c50ffe8ef58fe62cdce028
+Bug-Debian: https://bugs.debian.org/1031370
+Last-Update: 2023-04-21
+
+diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py
+index 4636647..f95207a 100644
+--- a/src/werkzeug/_internal.py
++++ b/src/werkzeug/_internal.py
+@@ -34,7 +34,7 @@ _quote_re = re.compile(rb"[\\].")
+ _legal_cookie_chars_re = rb"[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
+ _cookie_re = re.compile(
+     rb"""
+-    (?P<key>[^=;]+)
++    (?P<key>[^=;]*)
+     (?:\s*=\s*
+         (?P<val>
+             "(?:[^\\"]|\\.)*" |
+@@ -382,16 +382,21 @@ def _cookie_parse_impl(b: bytes) -> t.Iterator[t.Tuple[bytes, bytes]]:
+     """Lowlevel cookie parsing facility that operates on bytes."""
+     i = 0
+     n = len(b)
++    b += b";"
+ 
+     while i < n:
+-        match = _cookie_re.search(b + b";", i)
++        match = _cookie_re.match(b, i)
++
+         if not match:
+             break
+ 
+-        key = match.group("key").strip()
+-        value = match.group("val") or b""
+         i = match.end(0)
++        key = match.group("key").strip()
++
++        if not key:
++            continue
+ 
++        value = match.group("val") or b""
+         yield key, _cookie_unquote(value)
+ 
+ 
+diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py
+index 8288882..6b22738 100644
+--- a/src/werkzeug/sansio/http.py
++++ b/src/werkzeug/sansio/http.py
+@@ -126,10 +126,6 @@ def parse_cookie(
+     def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]:
+         for key, val in _cookie_parse_impl(cookie):  # type: ignore
+             key_str = _to_str(key, charset, errors, allow_none_charset=True)
+-
+-            if not key_str:
+-                continue
+-
+             val_str = _to_str(val, charset, errors, allow_none_charset=True)
+             yield key_str, val_str
+ 
+diff --git a/tests/test_http.py b/tests/test_http.py
+index 3760dc1..999549e 100644
+--- a/tests/test_http.py
++++ b/tests/test_http.py
+@@ -411,7 +411,8 @@ class TestHTTPUtility:
+     def test_parse_cookie(self):
+         cookies = http.parse_cookie(
+             "dismiss-top=6; CP=null*; 
PHPSESSID=0a539d42abc001cdc762809248d4beed;"
+-            'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d'
++            'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d;'
++            "==__Host-eq=bad;__Host-eq=good;"
+         )
+         assert cookies.to_dict() == {
+             "CP": "null*",
+@@ -422,6 +423,7 @@ class TestHTTPUtility:
+             "fo234{": "bar",
+             "blub": "Blah",
+             '"__Secure-c"': "d",
++            "__Host-eq": "good",
+         }
+ 
+     def test_dump_cookie(self):
diff -Nru python-werkzeug-2.2.2/debian/patches/0004-limit-the-maximum-number-of-multipart-form-parts.patch python-werkzeug-2.2.2/debian/patches/0004-limit-the-maximum-number-of-multipart-form-parts.patch
--- python-werkzeug-2.2.2/debian/patches/0004-limit-the-maximum-number-of-multipart-form-parts.patch    1970-01-01 01:00:00.000000000 +0100
+++ python-werkzeug-2.2.2/debian/patches/0004-limit-the-maximum-number-of-multipart-form-parts.patch    2023-04-21 13:37:22.000000000 +0200
@@ -0,0 +1,201 @@
+Description: CVE-2023-25577: limit the maximum number of multipart form parts
+ Applied-Upstream: 2.2.3
+Author: David Lord <david...@gmail.com>
+Date: Tue, 14 Feb 2023 09:08:57 -0800
+Origin: upstream, https://github.com/pallets/werkzeug/commit/517cac5a804e8c4dc4ed038bb20dacd038e7a9f1
+Bug-Debian: https://bugs.debian.org/1031370
+Last-Update: 2023-04-21
+
+diff --git a/docs/request_data.rst b/docs/request_data.rst
+index 83c6278..e55841e 100644
+--- a/docs/request_data.rst
++++ b/docs/request_data.rst
+@@ -73,23 +73,26 @@ read the stream *or* call :meth:`~Request.get_data`.
+ Limiting Request Data
+ ---------------------
+ 
+-To avoid being the victim of a DDOS attack you can set the maximum
+-accepted content length and request field sizes.  The :class:`Request`
+-class has two attributes for that: :attr:`~Request.max_content_length`
+-and :attr:`~Request.max_form_memory_size`.
+-
+-The first one can be used to limit the total content length.  For example
+-by setting it to ``1024 * 1024 * 16`` the request won't accept more than
+-16MB of transmitted data.
+-
+-Because certain data can't be moved to the hard disk (regular post data)
+-whereas temporary files can, there is a second limit you can set.  The
+-:attr:`~Request.max_form_memory_size` limits the size of `POST`
+-transmitted form data.  By setting it to ``1024 * 1024 * 2`` you can make
+-sure that all in memory-stored fields are not more than 2MB in size.
+-
+-This however does *not* affect in-memory stored files if the
+-`stream_factory` used returns a in-memory file.
++The :class:`Request` class provides a few attributes to control how much data is
++processed from the request body. This can help mitigate DoS attacks that craft the
++request in such a way that the server uses too many resources to handle it. Each of
++these limits will raise a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` if they are
++exceeded.
++
++-   :attr:`~Request.max_content_length` Stop reading request data after this number
++    of bytes. It's better to configure this in the WSGI server or HTTP server, rather
++    than the WSGI application.
++-   :attr:`~Request.max_form_memory_size` Stop reading request data if any form part is
++    larger than this number of bytes. While file parts can be moved to disk, regular
++    form field data is stored in memory only.
++-   :attr:`~Request.max_form_parts` Stop reading request data if more than this number
++    of parts are sent in multipart form data. This is useful to stop a very large number
++    of very small parts, especially file parts. The default is 1000.
++
++Using Werkzeug to set these limits is only one layer of protection. WSGI servers
++and HTTPS servers should set their own limits on size and timeouts. The operating system
++or container manager should set limits on memory and processing time for server
++processes.
+ 
+ 
+ How to extend Parsing?
+diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
+index 10d58ca..bebb2fc 100644
+--- a/src/werkzeug/formparser.py
++++ b/src/werkzeug/formparser.py
+@@ -179,6 +179,8 @@ class FormDataParser:
+     :param cls: an optional dict class to use.  If this is not specified
+                        or `None` the default :class:`MultiDict` is used.
+     :param silent: If set to False parsing errors will not be caught.
++    :param max_form_parts: The maximum number of parts to be parsed. If this is
++        exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
+     """
+ 
+     def __init__(
+@@ -190,6 +192,8 @@ class FormDataParser:
+         max_content_length: t.Optional[int] = None,
+         cls: t.Optional[t.Type[MultiDict]] = None,
+         silent: bool = True,
++        *,
++        max_form_parts: t.Optional[int] = None,
+     ) -> None:
+         if stream_factory is None:
+             stream_factory = default_stream_factory
+@@ -199,6 +203,7 @@ class FormDataParser:
+         self.errors = errors
+         self.max_form_memory_size = max_form_memory_size
+         self.max_content_length = max_content_length
++        self.max_form_parts = max_form_parts
+ 
+         if cls is None:
+             cls = MultiDict
+@@ -281,6 +286,7 @@ class FormDataParser:
+             self.errors,
+             max_form_memory_size=self.max_form_memory_size,
+             cls=self.cls,
++            max_form_parts=self.max_form_parts,
+         )
+         boundary = options.get("boundary", "").encode("ascii")
+ 
+@@ -346,10 +352,12 @@ class MultiPartParser:
+         max_form_memory_size: t.Optional[int] = None,
+         cls: t.Optional[t.Type[MultiDict]] = None,
+         buffer_size: int = 64 * 1024,
++        max_form_parts: t.Optional[int] = None,
+     ) -> None:
+         self.charset = charset
+         self.errors = errors
+         self.max_form_memory_size = max_form_memory_size
++        self.max_form_parts = max_form_parts
+ 
+         if stream_factory is None:
+             stream_factory = default_stream_factory
+@@ -409,7 +417,9 @@ class MultiPartParser:
+             [None],
+         )
+ 
+-        parser = MultipartDecoder(boundary, self.max_form_memory_size)
++        parser = MultipartDecoder(
++            boundary, self.max_form_memory_size, max_parts=self.max_form_parts
++        )
+ 
+         fields = []
+         files = []
+diff --git a/src/werkzeug/sansio/multipart.py b/src/werkzeug/sansio/multipart.py
+index d8abeb3..2684e5d 100644
+--- a/src/werkzeug/sansio/multipart.py
++++ b/src/werkzeug/sansio/multipart.py
+@@ -87,10 +87,13 @@ class MultipartDecoder:
+         self,
+         boundary: bytes,
+         max_form_memory_size: Optional[int] = None,
++        *,
++        max_parts: Optional[int] = None,
+     ) -> None:
+         self.buffer = bytearray()
+         self.complete = False
+         self.max_form_memory_size = max_form_memory_size
++        self.max_parts = max_parts
+         self.state = State.PREAMBLE
+         self.boundary = boundary
+ 
+@@ -118,6 +121,7 @@ class MultipartDecoder:
+             re.MULTILINE,
+         )
+         self._search_position = 0
++        self._parts_decoded = 0
+ 
+     def last_newline(self) -> int:
+         try:
+@@ -191,6 +195,10 @@ class MultipartDecoder:
+                     )
+                 self.state = State.DATA
+                 self._search_position = 0
++                self._parts_decoded += 1
++
++                if self.max_parts is not None and self._parts_decoded > self.max_parts:
++                    raise RequestEntityTooLarge()
+             else:
+                 # Update the search start position to be equal to the
+                 # current buffer length (already searched) minus a
+diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py
+index 57b739c..a6d5429 100644
+--- a/src/werkzeug/wrappers/request.py
++++ b/src/werkzeug/wrappers/request.py
+@@ -83,6 +83,13 @@ class Request(_SansIORequest):
+     #: .. versionadded:: 0.5
+     max_form_memory_size: t.Optional[int] = None
+ 
++    #: The maximum number of multipart parts to parse, passed to
++    #: :attr:`form_data_parser_class`. Parsing form data with more than this
++    #: many parts will raise :exc:`~.RequestEntityTooLarge`.
++    #:
++    #: .. versionadded:: 2.2.3
++    max_form_parts = 1000
++
+     #: The form data parser that should be used.  Can be replaced to customize
+     #: the form date parsing.
+     form_data_parser_class: t.Type[FormDataParser] = FormDataParser
+@@ -246,6 +253,7 @@ class Request(_SansIORequest):
+             self.max_form_memory_size,
+             self.max_content_length,
+             self.parameter_storage_class,
++            max_form_parts=self.max_form_parts,
+         )
+ 
+     def _load_form_data(self) -> None:
+diff --git a/tests/test_formparser.py b/tests/test_formparser.py
+index 49010b4..4c518b1 100644
+--- a/tests/test_formparser.py
++++ b/tests/test_formparser.py
+@@ -127,6 +127,15 @@ class TestFormParser:
+         req.max_form_memory_size = 400
+         assert req.form["foo"] == "Hello World"
+ 
++        req = Request.from_values(
++            input_stream=io.BytesIO(data),
++            content_length=len(data),
++            content_type="multipart/form-data; boundary=foo",
++            method="POST",
++        )
++        req.max_form_parts = 1
++        pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
++
+     def test_missing_multipart_boundary(self):
+         data = (
+             b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n"
diff -Nru python-werkzeug-2.2.2/debian/patches/series python-werkzeug-2.2.2/debian/patches/series
--- python-werkzeug-2.2.2/debian/patches/series 2022-11-30 13:52:03.000000000 +0100
+++ python-werkzeug-2.2.2/debian/patches/series 2023-04-21 13:37:22.000000000 +0200
@@ -1,2 +1,4 @@
 preserve-any-existing-PYTHONPATH-in-tests.patch
 remove-test_exclude_patterns-test.patch
+0003-don-t-strip-leading-when-parsing-cookie.patch
+0004-limit-the-maximum-number-of-multipart-form-parts.patch
