Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-msal for openSUSE:Factory 
checked in at 2025-04-29 16:42:48
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-msal (Old)
 and      /work/SRC/openSUSE:Factory/.python-msal.new.30101 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-msal"

Tue Apr 29 16:42:48 2025 rev:28 rq:1273419 version:1.32.3

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-msal/python-msal.changes  2025-03-19 22:33:34.689758177 +0100
+++ /work/SRC/openSUSE:Factory/.python-msal.new.30101/python-msal.changes       2025-04-29 16:43:06.825078516 +0200
@@ -1,0 +2,11 @@
+Tue Apr 29 11:12:18 UTC 2025 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- Update to version 1.32.3
+  * Fix a regression on Azure Arc / on-prem servers (#814, #815)
+- from version 1.32.2
+  * Bugfix for Authentication Failed: MsalResponse object has no
+    attribute 'headers' (#812)
+- from version 1.32.1
+  * Optimization on cache
+
+-------------------------------------------------------------------
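
The 1.32.2 entry above refers to http response objects that expose no 'headers'
attribute. A minimal, hypothetical sketch of the graceful-degradation pattern the
fix relies on (FakeResponse is a made-up stand-in, not an MSAL class):

    class FakeResponse:
        # Mimics a minimal http response that carries no `headers` attribute
        status_code = 429
        text = "slow down"

    def get_headers(response):
        # Fall back to an empty dict instead of raising AttributeError
        return getattr(response, "headers", {})

    retry_after = get_headers(FakeResponse()).get("retry-after", 5)
    print(retry_after)  # -> 5 (the default), since this response has no headers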

Old:
----
  msal-1.32.0.tar.gz

New:
----
  msal-1.32.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-msal.spec ++++++
--- /var/tmp/diff_new_pack.Pz32PJ/_old  2025-04-29 16:43:07.357100861 +0200
+++ /var/tmp/diff_new_pack.Pz32PJ/_new  2025-04-29 16:43:07.361101030 +0200
@@ -18,7 +18,7 @@
 
 %{?sle15_python_module_pythons}
 Name:           python-msal
-Version:        1.32.0
+Version:        1.32.3
 Release:        0
 Summary:        Microsoft Authentication Library (MSAL) for Python
 License:        MIT

++++++ msal-1.32.0.tar.gz -> msal-1.32.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/PKG-INFO new/msal-1.32.3/PKG-INFO
--- old/msal-1.32.0/PKG-INFO    2025-03-12 22:23:46.646134400 +0100
+++ new/msal-1.32.3/PKG-INFO    2025-04-25 15:12:29.590472200 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: msal
-Version: 1.32.0
+Version: 1.32.3
 Summary: The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect.
 Home-page: https://github.com/AzureAD/microsoft-authentication-library-for-python
 Author: Microsoft Corporation
@@ -31,6 +31,7 @@
 Provides-Extra: broker
 Requires-Dist: pymsalruntime<0.18,>=0.14; (python_version >= "3.6" and platform_system == "Windows") and extra == "broker"
 Requires-Dist: pymsalruntime<0.18,>=0.17; (python_version >= "3.8" and platform_system == "Darwin") and extra == "broker"
+Dynamic: license-file
 
 # Microsoft Authentication Library (MSAL) for Python
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal/application.py new/msal-1.32.3/msal/application.py
--- old/msal-1.32.0/msal/application.py 2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/msal/application.py 2025-04-25 15:12:24.000000000 +0200
@@ -499,6 +499,7 @@
                 except (
                         FileNotFoundError,  # Or IOError in Python 2
                         pickle.UnpicklingError,  # A corrupted http cache file
+                        AttributeError,  # Cache created by a different version of MSAL
                         ):
                     persisted_http_cache = {}  # Recover by starting afresh
                 atexit.register(lambda: pickle.dump(
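
The hunk above broadens the recovery path for the persisted http cache so that an
AttributeError raised while unpickling a cache written by a different MSAL version
is also treated as "start afresh". A minimal sketch of that recovery pattern
(cache_path is a hypothetical location, not something MSAL defines):

    import pickle

    cache_path = "/tmp/msal_http_cache.bin"  # hypothetical persisted http cache
    try:
        with open(cache_path, "rb") as f:
            persisted_http_cache = pickle.load(f)
    except (
            FileNotFoundError,       # No cache file yet
            pickle.UnpicklingError,  # A corrupted http cache file
            AttributeError,          # Cache created by a different version of MSAL
            ):
        persisted_http_cache = {}  # Recover by starting afresh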
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal/individual_cache.py new/msal-1.32.3/msal/individual_cache.py
--- old/msal-1.32.0/msal/individual_cache.py    2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/msal/individual_cache.py    2025-04-25 15:12:24.000000000 +0200
@@ -59,6 +59,10 @@
         self._expires_in = expires_in
         self._lock = Lock() if lock is None else lock
 
+    def _peek(self):
+        # Returns (sequence, timestamps) without triggering maintenance
+        return self._mapping.get(self._INDEX, ([], {}))
+
     def _validate_key(self, key):
         if key == self._INDEX:
             raise ValueError("key {} is a reserved keyword in {}".format(
@@ -85,7 +89,7 @@
         # This internal implementation powers both set() and __setitem__(),
         # so that they don't depend on each other.
         self._validate_key(key)
-        sequence, timestamps = self._mapping.get(self._INDEX, ([], {}))
+        sequence, timestamps = self._peek()
         self._maintenance(sequence, timestamps)  # O(logN)
         now = int(time.time())
         expires_at = now + expires_in
@@ -136,7 +140,7 @@
         self._validate_key(key)
         with self._lock:
             # Skip self._maintenance(), because it would need O(logN) time
-            sequence, timestamps = self._mapping.get(self._INDEX, ([], {}))
+            sequence, timestamps = self._peek()
             expires_at, created_at = timestamps[key]  # Would raise KeyError accordingly
             now = int(time.time())
             if not created_at <= now < expires_at:
@@ -155,14 +159,14 @@
         with self._lock:
             # Skip self._maintenance(), because it would need O(logN) time
             self._mapping.pop(key, None)  # O(1)
-            sequence, timestamps = self._mapping.get(self._INDEX, ([], {}))
+            sequence, timestamps = self._peek()
             del timestamps[key]  # O(1)
             self._mapping[self._INDEX] = sequence, timestamps
 
     def __len__(self):  # O(logN)
         """Drop all expired items and return the remaining length"""
         with self._lock:
-            sequence, timestamps = self._mapping.get(self._INDEX, ([], {}))
+            sequence, timestamps = self._peek()
             self._maintenance(sequence, timestamps)  # O(logN)
             self._mapping[self._INDEX] = sequence, timestamps
             return len(timestamps)  # Faster than iter(self._mapping) when it is on disk
@@ -170,7 +174,7 @@
     def __iter__(self):
         """Drop all expired items and return an iterator of the remaining 
items"""
         with self._lock:
-            sequence, timestamps = self._mapping.get(self._INDEX, ([], {}))
+            sequence, timestamps = self._peek()
             self._maintenance(sequence, timestamps)  # O(logN)
             self._mapping[self._INDEX] = sequence, timestamps
         return iter(timestamps)  # Faster than iter(self._mapping) when it is on disk
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal/managed_identity.py new/msal-1.32.3/msal/managed_identity.py
--- old/msal-1.32.0/msal/managed_identity.py    2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/msal/managed_identity.py    2025-04-25 15:12:24.000000000 +0200
@@ -112,8 +112,8 @@
 
 
 class _ThrottledHttpClient(ThrottledHttpClientBase):
-    def __init__(self, http_client, **kwargs):
-        super(_ThrottledHttpClient, self).__init__(http_client, **kwargs)
+    def __init__(self, *args, **kwargs):
+        super(_ThrottledHttpClient, self).__init__(*args, **kwargs)
         self.get = IndividualCache(  # All MIs (except Cloud Shell) use GETs
             mapping=self._expiring_mapping,
             key_maker=lambda func, args, kwargs: "REQ {} hash={} 429/5xx/Retry-After".format(
@@ -124,7 +124,7 @@
                     str(kwargs.get("params")) + str(kwargs.get("data"))),
                 ),
             expires_in=RetryAfterParser(5).parse,  # 5 seconds default for non-PCA
-            )(http_client.get)
+            )(self.get)  # Note: Decorate the parent get(), not the http_client.get()
 
 
 class ManagedIdentityClient(object):
@@ -233,8 +233,7 @@
             #    (especially for 410 which was supposed to be a permanent failure).
             # 2. MI on Service Fabric specifically suggests to not retry on 404.
             #    ( https://learn.microsoft.com/en-us/azure/service-fabric/how-to-managed-cluster-managed-identity-service-fabric-app-code#error-handling )
-            http_client.http_client  # Patch the raw (unpatched) http client
-                if isinstance(http_client, ThrottledHttpClientBase) else http_client,
+            http_client,
             http_cache=http_cache,
         )
         self._token_cache = token_cache or TokenCache()
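
The simplification above is possible because ThrottledHttpClientBase itself now
unwraps an already-throttled client (see the throttled_http_client.py hunk below).
A rough illustration of that de-nesting behavior, using a hypothetical
DummyHttpClient stand-in rather than MSAL's own test helpers:

    from msal.throttled_http_client import ThrottledHttpClientBase

    class DummyHttpClient:
        # Just enough surface for the wrapper; not a real http client
        def post(self, url, **kwargs): raise NotImplementedError
        def get(self, url, **kwargs): raise NotImplementedError
        def close(self): pass

    raw = DummyHttpClient()
    nested = ThrottledHttpClientBase(ThrottledHttpClientBase(raw))
    assert nested.http_client is raw  # nesting collapses to the raw, unthrottled client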
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal/sku.py new/msal-1.32.3/msal/sku.py
--- old/msal-1.32.0/msal/sku.py 2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/msal/sku.py 2025-04-25 15:12:24.000000000 +0200
@@ -2,5 +2,5 @@
 """
 
 # The __init__.py will import this. Not the other way around.
-__version__ = "1.32.0"
+__version__ = "1.32.3"
 SKU = "MSAL.Python"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal/throttled_http_client.py new/msal-1.32.3/msal/throttled_http_client.py
--- old/msal-1.32.0/msal/throttled_http_client.py       2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/msal/throttled_http_client.py       2025-04-25 15:12:24.000000000 +0200
@@ -3,26 +3,34 @@
 
 from .individual_cache import _IndividualCache as IndividualCache
 from .individual_cache import _ExpiringMapping as ExpiringMapping
+from .oauth2cli.http import Response
+from .exceptions import MsalServiceError
 
 
 # https://datatracker.ietf.org/doc/html/rfc8628#section-3.4
 DEVICE_AUTH_GRANT = "urn:ietf:params:oauth:grant-type:device_code"
 
 
+def _get_headers(response):
+    # MSAL's HttpResponse did not have headers until 1.23.0
+    # https://github.com/AzureAD/microsoft-authentication-library-for-python/pull/581/files#diff-28866b706bc3830cd20485685f20fe79d45b58dce7050e68032e9d9372d68654R61
+    # This helper ensures graceful degradation to {} without exception
+    return getattr(response, "headers", {})
+
+
 class RetryAfterParser(object):
+    FIELD_NAME_LOWER = "Retry-After".lower()
     def __init__(self, default_value=None):
         self._default_value = 5 if default_value is None else default_value
 
     def parse(self, *, result, **ignored):
         """Return seconds to throttle"""
         response = result
-        lowercase_headers = {k.lower(): v for k, v in getattr(
-            # Historically, MSAL's HttpResponse does not always have headers
-            response, "headers", {}).items()}
+        lowercase_headers = {k.lower(): v for k, v in _get_headers(response).items()}
         if not (response.status_code == 429 or response.status_code >= 500
-                or "retry-after" in lowercase_headers):
+                or self.FIELD_NAME_LOWER in lowercase_headers):
             return 0  # Quick exit
-        retry_after = lowercase_headers.get("retry-after", self._default_value)
+        retry_after = lowercase_headers.get(self.FIELD_NAME_LOWER, self._default_value)
         try:
             # AAD's retry_after uses integer format only
             # https://stackoverflow.microsoft.com/questions/264931/264932
@@ -37,16 +45,44 @@
     return data.get(key) if isinstance(data, dict) else default
 
 
+class NormalizedResponse(Response):
+    """A http response with the shape defined in Response,
+    but contains only the data we will store in cache.
+    """
+    def __init__(self, raw_response):
+        super().__init__()
+        self.status_code = raw_response.status_code
+        self.text = raw_response.text
+        self.headers = {
+            k.lower(): v for k, v in _get_headers(raw_response).items()
+            # Attempted storing only a small set of headers (such as Retry-After),
+            # but it tends to lead to missing information (such as WWW-Authenticate).
+            # So we store all headers, which are expected to contain only public info,
+            # because we throttle only error responses and public responses.
+        }
+
+    ## Note: Don't use the following line,
+    ## because when being pickled, it will indirectly pickle the whole raw_response
+    # self.raise_for_status = raw_response.raise_for_status
+    def raise_for_status(self):
+        if self.status_code >= 400:
+            raise MsalServiceError("HTTP Error: {}".format(self.status_code))
+
+
 class ThrottledHttpClientBase(object):
     """Throttle the given http_client by storing and retrieving data from 
cache.
 
-    This wrapper exists so that our patching post() and get() would prevent
-    re-patching side effect when/if same http_client being reused.
+    This base exists so that:
+    1. These base post() and get() will return a NormalizedResponse
+    2. The base __init__() will NOT re-throttle even if caller accidentally 
nested ThrottledHttpClient.
 
-    The subclass should implement post() and/or get()
+    Subclasses shall only need to dynamically decorate their post() and get() 
methods
+    in their __init__() method.
     """
     def __init__(self, http_client, *, http_cache=None):
-        self.http_client = http_client
+        self.http_client = http_client.http_client if isinstance(
+            # If it is already a ThrottledHttpClientBase, we use its raw 
(unthrottled) http client
+            http_client, ThrottledHttpClientBase) else http_client
         self._expiring_mapping = ExpiringMapping(  # It will automatically 
clean up
             mapping=http_cache if http_cache is not None else {},
             capacity=1024,  # To prevent cache blowing up especially for CCA
@@ -54,10 +90,10 @@
             )
 
     def post(self, *args, **kwargs):
-        return self.http_client.post(*args, **kwargs)
+        return NormalizedResponse(self.http_client.post(*args, **kwargs))
 
     def get(self, *args, **kwargs):
-        return self.http_client.get(*args, **kwargs)
+        return NormalizedResponse(self.http_client.get(*args, **kwargs))
 
     def close(self):
         return self.http_client.close()
@@ -68,12 +104,11 @@
 
 
 class ThrottledHttpClient(ThrottledHttpClientBase):
-    def __init__(self, http_client, *, default_throttle_time=None, **kwargs):
-        super(ThrottledHttpClient, self).__init__(http_client, **kwargs)
-
-        _post = http_client.post  # We'll patch _post, and keep original post() intact
-
-        _post = IndividualCache(
+    """A throttled http client that is used by MSAL's non-managed identity clients."""
+    def __init__(self, *args, default_throttle_time=None, **kwargs):
+        """Decorate self.post() and self.get() dynamically"""
+        super(ThrottledHttpClient, self).__init__(*args, **kwargs)
+        self.post = IndividualCache(
             # Internal specs requires throttling on at least token endpoint,
             # here we have a generic patch for POST on all endpoints.
             mapping=self._expiring_mapping,
@@ -91,9 +126,9 @@
                                 _extract_data(kwargs, "username")))),  # "account" of ROPC
                     ),
             expires_in=RetryAfterParser(default_throttle_time or 5).parse,
-            )(_post)
+            )(self.post)
 
-        _post = IndividualCache(  # It covers the "UI required cache"
+        self.post = IndividualCache(  # It covers the "UI required cache"
             mapping=self._expiring_mapping,
             key_maker=lambda func, args, kwargs: "POST {} hash={} 400".format(
                 args[0],  # It is the url, typically containing authority and tenant
@@ -125,12 +160,10 @@
                     isinstance(kwargs.get("data"), dict)
                     and kwargs["data"].get("grant_type") == DEVICE_AUTH_GRANT
                     )
-                and "retry-after" not in set(  # Leave it to the Retry-After decorator
-                    h.lower() for h in getattr(result, "headers", {}).keys())
+                and RetryAfterParser.FIELD_NAME_LOWER not in set(  # Otherwise leave it to the Retry-After decorator
+                    h.lower() for h in _get_headers(result))
                 else 0,
-            )(_post)
-
-        self.post = _post
+            )(self.post)
 
         self.get = IndividualCache(  # Typically those discovery GETs
             mapping=self._expiring_mapping,
@@ -140,9 +173,4 @@
                 ),
             expires_in=lambda result=None, **ignored:
                 3600*24 if 200 <= result.status_code < 300 else 0,
-            )(http_client.get)
-
-    # The following 2 methods have been defined dynamically by __init__()
-    #def post(self, *args, **kwargs): pass
-    #def get(self, *args, **kwargs): pass
-
+            )(self.get)
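
The NormalizedResponse type added above copies only status_code, text and
lowercased headers, so the pickled http cache stays small and never drags the raw
response object along. A rough illustration, assuming msal 1.32.3 is installed
(RawResponse is a made-up stand-in, not an MSAL class):

    import pickle
    from msal.throttled_http_client import NormalizedResponse

    class RawResponse:
        def __init__(self):
            self.status_code = 429
            self.text = "try later"
            self.headers = {"Retry-After": "3"}
            self.big_payload = b"x" * 10_000  # would bloat the cache if pickled

    normalized = NormalizedResponse(RawResponse())
    blob = pickle.dumps(normalized)
    assert b"x" * 100 not in blob                    # raw payload is left behind
    assert normalized.headers["retry-after"] == "3"  # header keys are lowercased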
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/msal.egg-info/PKG-INFO new/msal-1.32.3/msal.egg-info/PKG-INFO
--- old/msal-1.32.0/msal.egg-info/PKG-INFO      2025-03-12 22:23:46.000000000 +0100
+++ new/msal-1.32.3/msal.egg-info/PKG-INFO      2025-04-25 15:12:29.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: msal
-Version: 1.32.0
+Version: 1.32.3
 Summary: The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect.
 Home-page: https://github.com/AzureAD/microsoft-authentication-library-for-python
 Author: Microsoft Corporation
@@ -31,6 +31,7 @@
 Provides-Extra: broker
 Requires-Dist: pymsalruntime<0.18,>=0.14; (python_version >= "3.6" and platform_system == "Windows") and extra == "broker"
 Requires-Dist: pymsalruntime<0.18,>=0.17; (python_version >= "3.8" and platform_system == "Darwin") and extra == "broker"
+Dynamic: license-file
 
 # Microsoft Authentication Library (MSAL) for Python
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/tests/test_individual_cache.py new/msal-1.32.3/tests/test_individual_cache.py
--- old/msal-1.32.0/tests/test_individual_cache.py      2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/tests/test_individual_cache.py      2025-04-25 15:12:24.000000000 +0200
@@ -8,7 +8,13 @@
 class TestExpiringMapping(unittest.TestCase):
     def setUp(self):
         self.mapping = {}
-        self.m = ExpiringMapping(mapping=self.mapping, capacity=2, expires_in=1)
+        self.expires_in = 1
+        self.m = ExpiringMapping(
+            mapping=self.mapping, capacity=2, expires_in=self.expires_in)
+
+    def how_many(self):
+        # This helper checks how many items are in the mapping, WITHOUT triggering purge
+        return len(self.m._peek()[1])
 
     def test_should_disallow_accessing_reserved_keyword(self):
         with self.assertRaises(ValueError):
@@ -40,11 +46,21 @@
         sleep(1)
         self.assertEqual([], list(self.m))
 
-    def test_get_should_purge(self):
+    def test_get_should_not_purge_and_should_return_only_when_the_item_is_still_valid(self):
         self.m["thing one"] = "one"
+        self.m["thing two"] = "two"
         sleep(1)
+        self.assertEqual(2, self.how_many(), "We begin with 2 items")
         with self.assertRaises(KeyError):
             self.m["thing one"]
+        self.assertEqual(1, self.how_many(), "get() should not purge the remaining items")
+
+    def test_setitem_should_purge(self):
+        self.m["thing one"] = "one"
+        sleep(1)
+        self.m["thing two"] = "two"
+        self.assertEqual(1, self.how_many(), "setitem() should purge all expired items")
+        self.assertEqual("two", self.m["thing two"], "The remaining item should be thing two")
 
     def test_various_expiring_time(self):
         self.assertEqual(0, len(self.m))
@@ -57,12 +73,13 @@
     def test_old_item_can_be_updated_with_new_expiry_time(self):
         self.assertEqual(0, len(self.m))
         self.m["thing"] = "one"
-        self.m.set("thing", "two", 2)
+        new_lifetime = 3  # 2-second seems too short and causes flakiness
+        self.m.set("thing", "two", new_lifetime)
         self.assertEqual(1, len(self.m), "It contains 1 item")
         self.assertEqual("two", self.m["thing"], 'Already been updated to 
"two"')
-        sleep(1)
+        sleep(self.expires_in)
         self.assertEqual("two", self.m["thing"], "Not yet expires")
-        sleep(1)
+        sleep(new_lifetime - self.expires_in)
         self.assertEqual(0, len(self.m))
 
     def test_oversized_input_should_purge_most_aging_item(self):
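
The new tests above pin down the cache optimization mentioned in the changelog:
reading an expired key raises KeyError without purging the other expired entries,
while a write still triggers the O(logN) purge. A rough, hedged illustration,
assuming msal 1.32.3 is installed (the private _ExpiringMapping import mirrors
what this test module uses):

    from time import sleep
    from msal.individual_cache import _ExpiringMapping as ExpiringMapping

    m = ExpiringMapping(mapping={}, capacity=2, expires_in=1)
    m["thing one"] = "one"
    m["thing two"] = "two"
    sleep(1)
    try:
        m["thing one"]          # Expired: raises KeyError without a full purge...
    except KeyError:
        pass                    # ...so "thing two" still occupies a slot internally
    m["thing three"] = "three"  # A write purges the expired leftovers in O(logN)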
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/tests/test_mi.py new/msal-1.32.3/tests/test_mi.py
--- old/msal-1.32.0/tests/test_mi.py    2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/tests/test_mi.py    2025-04-25 15:12:24.000000000 +0200
@@ -9,7 +9,8 @@
     from mock import patch, ANY, mock_open, Mock
 import requests
 
-from tests.http_client import MinimalResponse
+from tests.test_throttled_http_client import (
+    MinimalResponse, ThrottledHttpClientBaseTestCase, DummyHttpClient)
 from msal import (
     SystemAssignedManagedIdentity, UserAssignedManagedIdentity,
     ManagedIdentityClient,
@@ -17,6 +18,7 @@
     ArcPlatformNotSupportedError,
 )
 from msal.managed_identity import (
+    _ThrottledHttpClient,
     _supported_arc_platforms_and_their_prefixes,
     get_managed_identity_source,
     APP_SERVICE,
@@ -49,6 +51,37 @@
             {"ManagedIdentityIdType": "SystemAssigned", "Id": None})
 
 
+class ThrottledHttpClientTestCase(ThrottledHttpClientBaseTestCase):
+    def test_throttled_http_client_should_not_alter_original_http_client(self):
+        self.assertNotAlteringOriginalHttpClient(_ThrottledHttpClient)
+
+    def test_throttled_http_client_should_not_cache_successful_http_response(self):
+        http_cache = {}
+        http_client=DummyHttpClient(
+            status_code=200,
+            response_text='{"access_token": "AT", "expires_in": "1234", "resource": "R"}',
+            )
+        app = ManagedIdentityClient(
+            SystemAssignedManagedIdentity(), http_client=http_client, http_cache=http_cache)
+        result = app.acquire_token_for_client(resource="R")
+        self.assertEqual("AT", result["access_token"])
+        self.assertEqual({}, http_cache, "Should not cache successful http response")
+
+    def test_throttled_http_client_should_cache_unsuccessful_http_response(self):
+        http_cache = {}
+        http_client=DummyHttpClient(
+            status_code=400,
+            response_headers={"Retry-After": "1"},
+            response_text='{"error": "invalid_request"}',
+            )
+        app = ManagedIdentityClient(
+            SystemAssignedManagedIdentity(), http_client=http_client, http_cache=http_cache)
+        result = app.acquire_token_for_client(resource="R")
+        self.assertEqual("invalid_request", result["error"])
+        self.assertNotEqual({}, http_cache, "Should cache unsuccessful http response")
+        self.assertCleanPickle(http_cache)
+
+
 class ClientTestCase(unittest.TestCase):
     maxDiff = None
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.32.0/tests/test_throttled_http_client.py new/msal-1.32.3/tests/test_throttled_http_client.py
--- old/msal-1.32.0/tests/test_throttled_http_client.py 2025-03-12 22:23:40.000000000 +0100
+++ new/msal-1.32.3/tests/test_throttled_http_client.py 2025-04-25 15:12:24.000000000 +0200
@@ -1,27 +1,43 @@
 # Test cases for https://identitydivision.visualstudio.com/devex/_git/AuthLibrariesApiReview?version=GBdev&path=%2FService%20protection%2FIntial%20set%20of%20protection%20measures.md&_a=preview&anchor=common-test-cases
+import pickle
 from time import sleep
 from random import random
 import logging
-from msal.throttled_http_client import ThrottledHttpClient
+
+from msal.throttled_http_client import (
+    ThrottledHttpClientBase, ThrottledHttpClient, NormalizedResponse)
+
 from tests import unittest
-from tests.http_client import MinimalResponse
+from tests.http_client import MinimalResponse as _MinimalResponse
 
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(level=logging.DEBUG)
 
 
+class MinimalResponse(_MinimalResponse):
+    SIGNATURE = str(random()).encode("utf-8")
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._ = (  # Only an instance attribute will be stored in pickled instance
+            self.__class__.SIGNATURE)  # Useful for testing its presence in pickled instance
+
+
 class DummyHttpClient(object):
-    def __init__(self, status_code=None, response_headers=None):
+    def __init__(self, status_code=None, response_headers=None, response_text=None):
         self._status_code = status_code
         self._response_headers = response_headers
+        self._response_text = response_text
 
     def _build_dummy_response(self):
         return MinimalResponse(
             status_code=self._status_code,
             headers=self._response_headers,
-            text=random(),  # So that we'd know whether a new response is received
-            )
+            text=self._response_text if self._response_text is not None else str(
+                random()  # So that we'd know whether a new response is received
+            ),
+        )
 
     def post(self, url, params=None, data=None, headers=None, **kwargs):
         return self._build_dummy_response()
@@ -33,23 +49,76 @@
         raise CloseMethodCalled("Not used by MSAL, but our customers may use it")
 
 
+class DummyHttpClientWithoutResponseHeaders(DummyHttpClient):
+    def post(self, url, params=None, data=None, headers=None, **kwargs):
+        response = super().post(url, params, data, headers, **kwargs)
+        del response.headers  # Early versions of MSAL did not require http client to return headers
+        return response
+
+    def get(self, url, params=None, headers=None, **kwargs):
+        response = super().get(url, params, headers, **kwargs)
+        del response.headers  # Early versions of MSAL did not require http client to return headers
+        return response
+
+
 class CloseMethodCalled(Exception):
     pass
 
 
-class TestHttpDecoration(unittest.TestCase):
+class ThrottledHttpClientBaseTestCase(unittest.TestCase):
 
-    def test_throttled_http_client_should_not_alter_original_http_client(self):
+    def assertCleanPickle(self, obj):
+        self.assertTrue(bool(obj), "The object should not be empty")
+        self.assertNotIn(
+            MinimalResponse.SIGNATURE, pickle.dumps(obj),
+            "A pickled object should not contain undesirable data")
+
+    def assertValidResponse(self, response):
+        self.assertIsInstance(response, NormalizedResponse)
+        self.assertCleanPickle(response)
+
+    def test_pickled_minimal_response_should_contain_signature(self):
+        self.assertIn(MinimalResponse.SIGNATURE, pickle.dumps(MinimalResponse(
+            status_code=200, headers={}, text="foo")))
+
+    def test_throttled_http_client_base_response_should_tolerate_headerless_response(self):
+        http_client = ThrottledHttpClientBase(DummyHttpClientWithoutResponseHeaders(
+            status_code=200, response_text="foo"))
+        response = http_client.post("https://example.com")
+        self.assertEqual(response.text, "foo", "Should return the same response text")
+
+    def test_throttled_http_client_base_response_should_not_contain_signature(self):
+        http_client = ThrottledHttpClientBase(DummyHttpClient(status_code=200))
+        response = http_client.post("https://example.com")
+        self.assertValidResponse(response)
+
+    def assertNotAlteringOriginalHttpClient(self, ThrottledHttpClientClass):
         original_http_client = DummyHttpClient()
         original_get = original_http_client.get
         original_post = original_http_client.post
-        throttled_http_client = ThrottledHttpClient(original_http_client)
+        throttled_http_client = ThrottledHttpClientClass(original_http_client)
         goal = """The implementation should wrap original http_client
             and keep it intact, instead of monkey-patching it"""
         self.assertNotEqual(throttled_http_client, original_http_client, goal)
         self.assertEqual(original_post, original_http_client.post)
         self.assertEqual(original_get, original_http_client.get)
 
+    def test_throttled_http_client_base_should_not_alter_original_http_client(self):
+        self.assertNotAlteringOriginalHttpClient(ThrottledHttpClientBase)
+
+    def test_throttled_http_client_base_should_not_nest_http_client(self):
+        original_http_client = DummyHttpClient()
+        throttled_http_client = ThrottledHttpClientBase(original_http_client)
+        self.assertIs(original_http_client, throttled_http_client.http_client)
+        nested_throttled_http_client = ThrottledHttpClientBase(throttled_http_client)
+        self.assertIs(original_http_client, nested_throttled_http_client.http_client)
+
+
+class ThrottledHttpClientTestCase(ThrottledHttpClientBaseTestCase):
+
+    def test_throttled_http_client_should_not_alter_original_http_client(self):
+        self.assertNotAlteringOriginalHttpClient(ThrottledHttpClient)
+
     def _test_RetryAfter_N_seconds_should_keep_entry_for_N_seconds(
             self, http_client, retry_after):
         http_cache = {}
@@ -112,15 +181,23 @@
         http_client = DummyHttpClient(
             status_code=400)  # It covers invalid_grant and interaction_required
         http_client = ThrottledHttpClient(http_client, http_cache=http_cache)
+
         resp1 = http_client.post("https://example.com", data={"claims": "foo"})
         logger.debug(http_cache)
+        self.assertValidResponse(resp1)
         resp1_again = http_client.post("https://example.com", data={"claims": "foo"})
+        self.assertValidResponse(resp1_again)
         self.assertEqual(resp1.text, resp1_again.text, "Should return a cached response")
+
         resp2 = http_client.post("https://example.com", data={"claims": "bar"})
+        self.assertValidResponse(resp2)
         self.assertNotEqual(resp1.text, resp2.text, "Should return a new response")
         resp2_again = http_client.post("https://example.com", data={"claims": "bar"})
+        self.assertValidResponse(resp2_again)
         self.assertEqual(resp2.text, resp2_again.text, "Should return a cached response")
 
+        self.assertCleanPickle(http_cache)
+
     def test_one_foci_app_recovering_from_invalid_grant_should_also_unblock_another(self):
         """
         Need not test multiple FOCI app's acquire_token_silent() here. By design,
