Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-azure-storage-file-datalake for openSUSE:Factory checked in at 2023-11-13 22:18:11
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-azure-storage-file-datalake (Old)
 and      /work/SRC/openSUSE:Factory/.python-azure-storage-file-datalake.new.17445 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-azure-storage-file-datalake"

Mon Nov 13 22:18:11 2023 rev:21 rq:1124976 version:12.14.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-azure-storage-file-datalake/python-azure-storage-file-datalake.changes        2023-10-12 11:52:19.995282639 +0200
+++ /work/SRC/openSUSE:Factory/.python-azure-storage-file-datalake.new.17445/python-azure-storage-file-datalake.changes        2023-11-13 22:20:48.547366344 +0100
@@ -1,0 +2,9 @@
+Fri Nov 10 11:12:19 UTC 2023 - John Paul Adrian Glaubitz <adrian.glaub...@suse.com>
+
+- New upstream release
+  + Version 12.14.0
+  + For detailed information about changes see the
+    CHANGELOG.md file provided with this package
+- Update Requires from setup.py
+
+-------------------------------------------------------------------

Old:
----
  azure-storage-file-datalake-12.13.1.tar.gz

New:
----
  azure-storage-file-datalake-12.14.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-azure-storage-file-datalake.spec ++++++
--- /var/tmp/diff_new_pack.05WDhd/_old  2023-11-13 22:20:49.031384165 +0100
+++ /var/tmp/diff_new_pack.05WDhd/_new  2023-11-13 22:20:49.035384312 +0100
@@ -21,7 +21,7 @@
 %define skip_python2 1
 %endif
 Name:           python-azure-storage-file-datalake
-Version:        12.13.1
+Version:        12.14.0
 Release:        0
 Summary:        Azure DataLake service client library for Python
 License:        MIT
@@ -38,7 +38,7 @@
 Requires:       python-azure-core >= 1.28.0
 Requires:       python-azure-nspkg >= 3.0.0
 Requires:       python-azure-storage-blob < 13.0.0
-Requires:       python-azure-storage-blob >= 12.18.1
+Requires:       python-azure-storage-blob >= 12.19.0
 Requires:       python-azure-storage-nspkg >= 3.0.0
 Requires:       python-isodate >= 0.6.1
 Requires:       (python-typing_extensions >= 4.3.0)
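The spec bump above raises the azure-storage-blob floor to 12.19.0. As an illustrative aside (not part of the package), one way to confirm an installed environment satisfies the tightened runtime requirement, assuming the 'packaging' library is available:

    # Hedged sketch: verify the azure-storage-blob floor required by 12.14.0 is met.
    from importlib.metadata import version      # Python 3.8+
    from packaging.version import Version       # assumes 'packaging' is installed

    installed = Version(version("azure-storage-blob"))
    assert installed >= Version("12.19.0"), f"azure-storage-blob {installed} is older than 12.19.0"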

++++++ azure-storage-file-datalake-12.13.1.tar.gz -> azure-storage-file-datalake-12.14.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/azure-storage-file-datalake-12.13.1/CHANGELOG.md new/azure-storage-file-datalake-12.14.0/CHANGELOG.md
--- old/azure-storage-file-datalake-12.13.1/CHANGELOG.md        2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/CHANGELOG.md        2023-11-07 23:29:15.000000000 +0100
@@ -1,5 +1,23 @@
 # Release History
 
+## 12.14.0 (2023-11-07)
+
+### Features Added
+- Stable release of features from 12.14.0b1
+
+## 12.14.0b1 (2023-10-17)
+
+### Features Added
+- Added support for service version 2023-11-03.
+- Added `audience` as an optional keyword that can be specified on APIs that have a `credential` parameter. This
+keyword only has an effect when the credential provided is of type `TokenCredential`.
+
+## 12.13.2 (2023-10-10)
+
+### Bugs Fixed
+- Fixed an issue when an invalid type was provided for `credential` during client construction, the
+`__str__` of the object would be present in the exception message and therefore potentially logged.
+
 ## 12.13.1 (2023-09-13)
 
 ### Bugs Fixed
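As a usage note for the `audience` keyword described above, a minimal sketch against the stable 12.14.0 API (the account name and credential are placeholders, not part of this diff):

    from azure.identity import DefaultAzureCredential
    from azure.storage.filedatalake import DataLakeServiceClient

    # 'audience' only takes effect for TokenCredential-based auth; the default
    # audience is https://storage.azure.com/ per the docstrings added below.
    service = DataLakeServiceClient(
        "https://<account>.dfs.core.windows.net",
        credential=DefaultAzureCredential(),
        audience="https://<account>.blob.core.windows.net/",
    )
    for file_system in service.list_file_systems():
        print(file_system.name)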
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-storage-file-datalake-12.13.1/PKG-INFO 
new/azure-storage-file-datalake-12.14.0/PKG-INFO
--- old/azure-storage-file-datalake-12.13.1/PKG-INFO    2023-09-13 
23:19:55.000465600 +0200
+++ new/azure-storage-file-datalake-12.14.0/PKG-INFO    2023-11-07 
23:30:03.745338400 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: azure-storage-file-datalake
-Version: 12.13.1
+Version: 12.14.0
 Summary: Microsoft Azure File DataLake Storage Client Library for Python
 Home-page: https://github.com/Azure/azure-sdk-for-python
 Author: Microsoft Corporation
@@ -15,12 +15,13 @@
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: azure-core<2.0.0,>=1.28.0
-Requires-Dist: azure-storage-blob<13.0.0,>=12.18.1
+Requires-Dist: azure-storage-blob<13.0.0,>=12.19.0
 Requires-Dist: typing-extensions>=4.3.0
 Requires-Dist: isodate>=0.6.1
 Provides-Extra: aio
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_directory_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_directory_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_directory_client.py
   2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_directory_client.py
   2023-11-07 23:29:15.000000000 +0100
@@ -56,6 +56,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
@@ -105,15 +108,9 @@
         :param directory_name:
             The name of directory to interact with. The directory is under file system.
         :type directory_name: str
-        :param credential:
-            The credentials with which to authenticate. This is optional if the
-            account URL already has a SAS token, or the connection string already has shared
-            access key values. The value can be a SAS token string,
-            an instance of a AzureSasCredential or AzureNamedKeyCredential from azure.core.credentials,
-            an account shared access key, or an instance of a TokenCredentials class from azure.identity.
-            Credentials provided here will take precedence over those in the connection string.
-            If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
-            should be the storage account key.
+        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+            authentication. Only has an effect when credential is of type TokenCredential. The value could be
+            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
         :return: a DataLakeDirectoryClient
         :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient
         """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_file_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_file_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_file_client.py
        2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_file_client.py
        2023-11-07 23:29:15.000000000 +0100
@@ -64,6 +64,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
@@ -112,6 +115,9 @@
             If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
             should be the storage account key.
         :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
+        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+            authentication. Only has an effect when credential is of type TokenCredential. The value could be
+            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
         :return a DataLakeFileClient
         :rtype ~azure.storage.filedatalake.DataLakeFileClient
         """
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_service_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_service_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_data_lake_service_client.py
     2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_data_lake_service_client.py
     2023-11-07 23:29:15.000000000 +0100
@@ -55,6 +55,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
 
     .. admonition:: Example:
@@ -148,6 +151,9 @@
             key, or an instance of a TokenCredentials class from azure.identity.
             Credentials provided here will take precedence over those in the connection string.
         :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]]  # pylint: disable=line-too-long
+        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+            authentication. Only has an effect when credential is of type TokenCredential. The value could be
+            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
         :return a DataLakeServiceClient
         :rtype ~azure.storage.filedatalake.DataLakeServiceClient
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_file_system_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_file_system_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_file_system_client.py
   2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_file_system_client.py
   2023-11-07 23:29:15.000000000 +0100
@@ -62,6 +62,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
@@ -161,6 +164,9 @@
             If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key"
             should be the storage account key.
         :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None,  # pylint: disable=line-too-long
+        :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+            authentication. Only has an effect when credential is of type TokenCredential. The value could be
+            https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
         :return a FileSystemClient
         :rtype ~azure.storage.filedatalake.FileSystemClient
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_path_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_path_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_path_client.py
  2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_path_client.py
  2023-11-07 23:29:15.000000000 +0100
@@ -64,6 +64,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
     """
     def __init__(
             self, account_url: str,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_serialize.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_serialize.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_serialize.py
    2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_serialize.py
    2023-11-07 23:29:15.000000000 +0100
@@ -31,6 +31,7 @@
     '2023-01-03',
     '2023-05-03',
     '2023-08-03',
+    '2023-11-03',
 ]  # This list must be in chronological order!
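With '2023-11-03' appended to the supported versions list, the new service version can also be pinned explicitly instead of relying on the SDK default; a hedged one-liner (account URL and credential are placeholders):

    from azure.identity import DefaultAzureCredential
    from azure.storage.filedatalake import DataLakeServiceClient

    # Pin the service version added in this release.
    service = DataLakeServiceClient(
        "https://<account>.dfs.core.windows.net",
        credential=DefaultAzureCredential(),
        api_version="2023-11-03",
    )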
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/base_client.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/base_client.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/base_client.py
   2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/base_client.py
   2023-11-07 23:29:15.000000000 +0100
@@ -36,7 +36,7 @@
     UserAgentPolicy,
 )
 
-from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE
+from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, SERVICE_HOST_BASE, STORAGE_OAUTH_SCOPE
 from .models import LocationMode
 from .authentication import SharedKeyCredentialPolicy
 from .shared_access_signature import QueryStringConstants
@@ -221,13 +221,17 @@
         # type: (Any, **Any) -> Tuple[Configuration, Pipeline]
         self._credential_policy = None
         if hasattr(credential, "get_token"):
-            self._credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
+            if kwargs.get('audience'):
+                audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE
+            else:
+                audience = STORAGE_OAUTH_SCOPE
+            self._credential_policy = BearerTokenCredentialPolicy(credential, audience)
         elif isinstance(credential, SharedKeyCredentialPolicy):
             self._credential_policy = credential
         elif isinstance(credential, AzureSasCredential):
             self._credential_policy = AzureSasCredentialPolicy(credential)
         elif credential is not None:
-            raise TypeError(f"Unsupported credential: {credential}")
+            raise TypeError(f"Unsupported credential: {type(credential)}")
 
         config = kwargs.get("_configuration") or create_configuration(**kwargs)
         if kwargs.get("_pipeline"):
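In effect, the audience keyword is normalized into the OAuth scope handed to the bearer-token policy. A standalone sketch of that mapping, assuming the usual values of the shared constants (DEFAULT_OAUTH_SCOPE = "/.default", STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"); the helper name is hypothetical, not SDK API:

    DEFAULT_OAUTH_SCOPE = "/.default"                             # assumed constant value
    STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"    # assumed constant value

    def resolve_scope(audience=None):
        """Mirror of the branch above: turn an optional audience into a token scope."""
        if audience:
            return str(audience).rstrip('/') + DEFAULT_OAUTH_SCOPE
        return STORAGE_OAUTH_SCOPE

    # resolve_scope("https://<account>.blob.core.windows.net/")
    #   -> "https://<account>.blob.core.windows.net/.default"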
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/base_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/base_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/base_client_async.py
     2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/base_client_async.py
     2023-11-07 23:29:15.000000000 +0100
@@ -24,7 +24,7 @@
 )
 from azure.core.pipeline.transport import AsyncHttpTransport
 
-from .constants import CONNECTION_TIMEOUT, READ_TIMEOUT, STORAGE_OAUTH_SCOPE
+from .constants import CONNECTION_TIMEOUT, DEFAULT_OAUTH_SCOPE, READ_TIMEOUT, STORAGE_OAUTH_SCOPE
 from .authentication import SharedKeyCredentialPolicy
 from .base_client import create_configuration
 from .policies import (
@@ -70,13 +70,17 @@
         # type: (Any, **Any) -> Tuple[Configuration, Pipeline]
         self._credential_policy = None
         if hasattr(credential, 'get_token'):
-            self._credential_policy = AsyncBearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
+            if kwargs.get('audience'):
+                audience = str(kwargs.pop('audience')).rstrip('/') + DEFAULT_OAUTH_SCOPE
+            else:
+                audience = STORAGE_OAUTH_SCOPE
+            self._credential_policy = AsyncBearerTokenCredentialPolicy(credential, audience)
         elif isinstance(credential, SharedKeyCredentialPolicy):
             self._credential_policy = credential
         elif isinstance(credential, AzureSasCredential):
             self._credential_policy = AzureSasCredentialPolicy(credential)
         elif credential is not None:
-            raise TypeError(f"Unsupported credential: {credential}")
+            raise TypeError(f"Unsupported credential: {type(credential)}")
         config = kwargs.get('_configuration') or create_configuration(**kwargs)
         if kwargs.get('_pipeline'):
             return config, kwargs['_pipeline']
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/policies.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/policies.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/policies.py
      2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/policies.py
      2023-11-07 23:29:15.000000000 +0100
@@ -40,7 +40,7 @@
 from azure.core.exceptions import AzureError, ServiceRequestError, 
ServiceResponseError
 
 from .authentication import StorageHttpChallenge
-from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE
+from .constants import DEFAULT_OAUTH_SCOPE
 from .models import LocationMode
 
 try:
@@ -647,9 +647,8 @@
 class StorageBearerTokenCredentialPolicy(BearerTokenCredentialPolicy):
     """ Custom Bearer token credential policy for following Storage Bearer 
challenges """
 
-    def __init__(self, credential, **kwargs):
-        # type: (TokenCredential, **Any) -> None
-        super(StorageBearerTokenCredentialPolicy, self).__init__(credential, 
STORAGE_OAUTH_SCOPE, **kwargs)
+    def __init__(self, credential: "TokenCredential", audience: str, **kwargs: 
Any) -> None:
+        super(StorageBearerTokenCredentialPolicy, self).__init__(credential, 
audience, **kwargs)
 
     def on_challenge(self, request, response):
         # type: (PipelineRequest, PipelineResponse) -> bool
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/policies_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/policies_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared/policies_async.py
        2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared/policies_async.py
        2023-11-07 23:29:15.000000000 +0100
@@ -14,7 +14,7 @@
 from azure.core.exceptions import AzureError
 
 from .authentication import StorageHttpChallenge
-from .constants import DEFAULT_OAUTH_SCOPE, STORAGE_OAUTH_SCOPE
+from .constants import DEFAULT_OAUTH_SCOPE
 from .policies import is_retry, StorageRetryPolicy
 
 if TYPE_CHECKING:
@@ -237,9 +237,8 @@
 class AsyncStorageBearerTokenCredentialPolicy(AsyncBearerTokenCredentialPolicy):
     """ Custom Bearer token credential policy for following Storage Bearer challenges """
 
-    def __init__(self, credential, **kwargs):
-        # type: (AsyncTokenCredential, **Any) -> None
-        super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, STORAGE_OAUTH_SCOPE, **kwargs)
+    def __init__(self, credential: "AsyncTokenCredential", audience: str, **kwargs: Any) -> None:
+        super(AsyncStorageBearerTokenCredentialPolicy, self).__init__(credential, audience, **kwargs)
 
     async def on_challenge(self, request, response):
         # type: (PipelineRequest, PipelineResponse) -> bool
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared_access_signature.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared_access_signature.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_shared_access_signature.py
      2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_shared_access_signature.py
      2023-11-07 23:29:15.000000000 +0100
@@ -26,7 +26,7 @@
         account_key,  # type: str
         resource_types,  # type: Union[ResourceTypes, str]
         permission,  # type: Union[AccountSasPermissions, str]
-        expiry,  # type: Optional[Union[datetime, str]]
+        expiry,  # type: Union[datetime, str]
         **kwargs # type: Any
     ):  # type: (...) -> str
     """Generates a shared access signature for the DataLake service.
@@ -44,17 +44,11 @@
     :param permission:
         The permissions associated with the shared access signature. The
         user is restricted to operations allowed by the permissions.
-        Required unless an id is given referencing a stored access policy
-        which contains this field. This field must be omitted if it has been
-        specified in an associated stored access policy.
     :type permission: str or ~azure.storage.filedatalake.AccountSasPermissions
     :param expiry:
         The time at which the shared access signature becomes invalid.
-        Required unless an id is given referencing a stored access policy
-        which contains this field. This field must be omitted if it has
-        been specified in an associated stored access policy. Azure will always
-        convert values to UTC. If a date is passed in without timezone info, it
-        is assumed to be UTC.
+        Azure will always convert values to UTC. If a date is passed in
+        without timezone info, it is assumed to be UTC.
     :type expiry: ~datetime.datetime or str
     :keyword start:
         The time at which the shared access signature becomes valid. If
@@ -136,6 +130,10 @@
         to UTC. If a date is passed in without timezone info, it is assumed to
         be UTC.
     :paramtype start: datetime or str
+    :keyword str policy_id:
+        A unique value up to 64 characters in length that correlates to a
+        stored access policy. To create a stored access policy, use
+        :func:`~azure.storage.filedatalake.FileSystemClient.set_file_system_access_policy`.
     :keyword str ip:
         Specifies an IP address or a range of IP addresses from which to accept requests.
         If the IP address from which the request originates does not match the IP address
@@ -240,6 +238,10 @@
         to UTC. If a date is passed in without timezone info, it is assumed to
         be UTC.
     :paramtype start: ~datetime.datetime or str
+    :keyword str policy_id:
+        A unique value up to 64 characters in length that correlates to a
+        stored access policy. To create a stored access policy, use
+        :func:`~azure.storage.filedatalake.FileSystemClient.set_file_system_access_policy`.
     :keyword str ip:
         Specifies an IP address or a range of IP addresses from which to accept requests.
         If the IP address from which the request originates does not match the IP address
@@ -351,6 +353,10 @@
         to UTC. If a date is passed in without timezone info, it is assumed to
         be UTC.
     :paramtype start: ~datetime.datetime or str
+    :keyword str policy_id:
+        A unique value up to 64 characters in length that correlates to a
+        stored access policy. To create a stored access policy, use
+        :func:`~azure.storage.filedatalake.FileSystemClient.set_file_system_access_policy`.
     :keyword str ip:
         Specifies an IP address or a range of IP addresses from which to accept requests.
         If the IP address from which the request originates does not match the IP address
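The newly documented policy_id keyword ties the generated SAS to a stored access policy; a hedged example using generate_file_system_sas (account, key, and policy name are placeholders):

    from azure.storage.filedatalake import generate_file_system_sas

    # Permissions and expiry come from the stored access policy referenced by
    # policy_id, created beforehand via FileSystemClient.set_file_system_access_policy().
    sas_token = generate_file_system_sas(
        account_name="<account>",
        file_system_name="<filesystem>",
        credential="<account-key>",
        policy_id="my-stored-policy",
    )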
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_version.py 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_version.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/_version.py  
    2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/_version.py  
    2023-11-07 23:29:15.000000000 +0100
@@ -4,4 +4,4 @@
 # license information.
 # --------------------------------------------------------------------------
 
-VERSION = "12.13.1"
+VERSION = "12.14.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
 2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py
 2023-11-07 23:29:15.000000000 +0100
@@ -59,6 +59,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
      2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_file_client_async.py
      2023-11-07 23:29:15.000000000 +0100
@@ -56,6 +56,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
@@ -160,9 +163,9 @@
             This value is not tracked or validated on the client. To configure 
client-side network timesouts
             see `here 
<https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-datalake
             #other-client--per-operation-configuration>`_.
-        :return: response dict (Etag and last modified).
         :keyword str encryption_context:
             Specifies the encryption context to set on the file.
+        :return: response dict (Etag and last modified).
 
         .. admonition:: Example:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_service_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_service_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_data_lake_service_client_async.py
   2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_data_lake_service_client_async.py
   2023-11-07 23:29:15.000000000 +0100
@@ -59,6 +59,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_file_system_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_file_system_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_file_system_client_async.py
 2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_file_system_client_async.py
 2023-11-07 23:29:15.000000000 +0100
@@ -70,6 +70,9 @@
      :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
 
     .. admonition:: Example:
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_path_client_async.py
 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_path_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/azure/storage/filedatalake/aio/_path_client_async.py
        2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure/storage/filedatalake/aio/_path_client_async.py
        2023-11-07 23:29:15.000000000 +0100
@@ -51,6 +51,9 @@
     :keyword str api_version:
         The Storage API version to use for requests. Default value is the most 
recent service version that is
         compatible with the current SDK. Setting to an older version may 
result in reduced feature compatibility.
+    :keyword str audience: The audience to use when requesting tokens for Azure Active Directory
+        authentication. Only has an effect when credential is of type TokenCredential. The value could be
+        https://storage.azure.com/ (default) or https://<account>.blob.core.windows.net.
     """
     def __init__(
             self, account_url: str,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure_storage_file_datalake.egg-info/PKG-INFO
 
new/azure-storage-file-datalake-12.14.0/azure_storage_file_datalake.egg-info/PKG-INFO
--- 
old/azure-storage-file-datalake-12.13.1/azure_storage_file_datalake.egg-info/PKG-INFO
       2023-09-13 23:19:54.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure_storage_file_datalake.egg-info/PKG-INFO
       2023-11-07 23:30:03.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: azure-storage-file-datalake
-Version: 12.13.1
+Version: 12.14.0
 Summary: Microsoft Azure File DataLake Storage Client Library for Python
 Home-page: https://github.com/Azure/azure-sdk-for-python
 Author: Microsoft Corporation
@@ -15,12 +15,13 @@
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: azure-core<2.0.0,>=1.28.0
-Requires-Dist: azure-storage-blob<13.0.0,>=12.18.1
+Requires-Dist: azure-storage-blob<13.0.0,>=12.19.0
 Requires-Dist: typing-extensions>=4.3.0
 Requires-Dist: isodate>=0.6.1
 Provides-Extra: aio
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/azure_storage_file_datalake.egg-info/requires.txt
 
new/azure-storage-file-datalake-12.14.0/azure_storage_file_datalake.egg-info/requires.txt
--- 
old/azure-storage-file-datalake-12.13.1/azure_storage_file_datalake.egg-info/requires.txt
   2023-09-13 23:19:54.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/azure_storage_file_datalake.egg-info/requires.txt
   2023-11-07 23:30:03.000000000 +0100
@@ -1,5 +1,5 @@
 azure-core<2.0.0,>=1.28.0
-azure-storage-blob<13.0.0,>=12.18.1
+azure-storage-blob<13.0.0,>=12.19.0
 typing-extensions>=4.3.0
 isodate>=0.6.1
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/azure-storage-file-datalake-12.13.1/setup.py 
new/azure-storage-file-datalake-12.14.0/setup.py
--- old/azure-storage-file-datalake-12.13.1/setup.py    2023-09-13 
23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/setup.py    2023-11-07 
23:29:15.000000000 +0100
@@ -65,6 +65,7 @@
         'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: 3.9',
         'Programming Language :: Python :: 3.10',
+        'Programming Language :: Python :: 3.11',
         'License :: OSI Approved :: MIT License',
     ],
     zip_safe=False,
@@ -77,7 +78,7 @@
     python_requires=">=3.7",
     install_requires=[
         "azure-core<2.0.0,>=1.28.0",
-        "azure-storage-blob<13.0.0,>=12.18.1",
+        "azure-storage-blob<13.0.0,>=12.19.0",
         "typing-extensions>=4.3.0",
         "isodate>=0.6.1"
     ],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_datalake_service_client.py 
new/azure-storage-file-datalake-12.14.0/tests/test_datalake_service_client.py
--- 
old/azure-storage-file-datalake-12.13.1/tests/test_datalake_service_client.py   
    2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/tests/test_datalake_service_client.py   
    2023-11-07 23:29:15.000000000 +0100
@@ -8,7 +8,7 @@
 from unittest.mock import MagicMock
 
 from azure.core.credentials import AzureNamedKeyCredential
-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError
 from azure.storage.filedatalake import (
     AnalyticsLogging,
     CorsRule,
@@ -452,3 +452,50 @@
         
dir_client._datalake_client_for_blob_operation.close.assert_called_once()
         file_client._client.__exit__.assert_called_once()
         
file_client._datalake_client_for_blob_operation.close.assert_called_once()
+
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_storage_account_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setup(datalake_storage_account_name, 
datalake_storage_account_key)
+        self.dsc.create_file_system('testfs1')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        dsc = DataLakeServiceClient(
+            self.account_url(datalake_storage_account_name, "blob"),
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = dsc.list_file_systems()
+        response2 = dsc.create_file_system('testfs11')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_bad_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setup(datalake_storage_account_name, 
datalake_storage_account_key)
+        self.dsc.create_file_system('testfs2')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        dsc = DataLakeServiceClient(
+            self.account_url(datalake_storage_account_name, "blob"),
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            dsc.list_file_systems()
+            dsc.create_file_system('testfs22')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_datalake_service_client_async.py
 
new/azure-storage-file-datalake-12.14.0/tests/test_datalake_service_client_async.py
--- 
old/azure-storage-file-datalake-12.13.1/tests/test_datalake_service_client_async.py
 2023-09-13 23:18:55.000000000 +0200
+++ 
new/azure-storage-file-datalake-12.14.0/tests/test_datalake_service_client_async.py
 2023-11-07 23:29:15.000000000 +0100
@@ -8,7 +8,7 @@
 import sys
 
 from azure.core.credentials import AzureNamedKeyCredential
-from azure.core.exceptions import HttpResponseError
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError
 
 from azure.storage.filedatalake import (
     AnalyticsLogging,
@@ -454,3 +454,50 @@
         
dir_client._datalake_client_for_blob_operation.close.assert_called_once()
         file_client._client.__aexit__.assert_called_once()
         
file_client._datalake_client_for_blob_operation.close.assert_called_once()
+
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_storage_account_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        self._setup(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_system_name = self.get_resource_name('filesystem')
+        await self.dsc.create_file_system(file_system_name)
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        dsc = DataLakeServiceClient(
+            self.account_url(datalake_storage_account_name, "blob"),
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = dsc.list_file_systems()
+        response2 = dsc.create_file_system(file_system_name + '1')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_bad_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        self._setup(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_system_name = self.get_resource_name('filesystem')
+        await self.dsc.create_file_system(file_system_name)
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        dsc = DataLakeServiceClient(
+            self.account_url(datalake_storage_account_name, "blob"),
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            dsc.list_file_systems()
+            await dsc.create_file_system(file_system_name + '1')
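The aio clients take the same keyword, as the async tests above show; a minimal async sketch (illustrative only, the account name is a placeholder):

    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.storage.filedatalake.aio import DataLakeServiceClient

    async def main():
        credential = DefaultAzureCredential()
        async with DataLakeServiceClient(
            "https://<account>.dfs.core.windows.net",
            credential=credential,
            audience="https://<account>.blob.core.windows.net/",
        ) as service:
            async for file_system in service.list_file_systems():
                print(file_system.name)
        await credential.close()

    asyncio.run(main())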
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_directory.py 
new/azure-storage-file-datalake-12.14.0/tests/test_directory.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_directory.py     
2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_directory.py     
2023-11-07 23:29:15.000000000 +0100
@@ -11,6 +11,7 @@
 from azure.core import MatchConditions
 from azure.core.exceptions import (
     AzureError,
+    ClientAuthenticationError,
     HttpResponseError,
     ResourceExistsError,
     ResourceModifiedError,
@@ -1557,6 +1558,56 @@
         assert dir_client2.api_version == "2019-02-02"
         assert file_client2.api_version == "2019-02-02"
 
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_storage_account_audience_dir_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        directory_name = self._get_directory_reference()
+        directory_client = 
self.dsc.get_directory_client(self.file_system_name, directory_name)
+        directory_client.create_directory()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        directory_client = DataLakeDirectoryClient(
+            self.dsc.url, self.file_system_name, directory_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = directory_client.exists()
+        response2 = directory_client.create_sub_directory('testsubdir')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_bad_audience_dir_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        directory_name = self._get_directory_reference()
+        directory_client = 
self.dsc.get_directory_client(self.file_system_name, directory_name)
+        directory_client.create_directory()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        directory_client = DataLakeDirectoryClient(
+            self.dsc.url, self.file_system_name, directory_name,
+            credential=token_credential, 
audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            directory_client.exists()
+            directory_client.create_sub_directory('testsubdir')
+
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_directory_async.py 
new/azure-storage-file-datalake-12.14.0/tests/test_directory_async.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_directory_async.py       
2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_directory_async.py       
2023-11-07 23:29:15.000000000 +0100
@@ -12,6 +12,7 @@
 from azure.core import MatchConditions
 from azure.core.exceptions import (
     AzureError,
+    ClientAuthenticationError,
     HttpResponseError,
     ResourceExistsError,
     ResourceModifiedError,
@@ -1505,6 +1506,58 @@
         assert dir_client2.api_version == "2019-02-02"
         assert file_client2.api_version == "2019-02-02"
 
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_storage_account_audience_dir_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        await self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        # generate a token with directory level create permission
+        directory_name = self._get_directory_reference()
+        directory_client = 
self.dsc.get_directory_client(self.file_system_name, directory_name)
+        await directory_client.create_directory()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        directory_client = DataLakeDirectoryClient(
+            self.dsc.url, self.file_system_name, directory_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = directory_client.exists()
+        response2 = directory_client.create_sub_directory('testsubdir')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_bad_audience_dir_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        await self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        # generate a token with directory level create permission
+        directory_name = self._get_directory_reference()
+        directory_client = 
self.dsc.get_directory_client(self.file_system_name, directory_name)
+        await directory_client.create_directory()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        directory_client = DataLakeDirectoryClient(
+            self.dsc.url, self.file_system_name, directory_name,
+            credential=token_credential, 
audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            await directory_client.exists()
+            await directory_client.create_sub_directory('testsubdir')
+
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_file.py 
new/azure-storage-file-datalake-12.14.0/tests/test_file.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_file.py  2023-09-13 
23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_file.py  2023-11-07 
23:29:15.000000000 +0100
@@ -1579,6 +1579,59 @@
         assert file_properties['group'] is not None
         assert file_properties['permissions'] is not None
 
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_storage_account_audience_file_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_client = self._create_file_and_return_client()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fc = DataLakeFileClient(
+            self.account_url(datalake_storage_account_name, 'dfs'),
+            file_client.file_system_name + '/',
+            '/' + file_client.path_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        data = b'Hello world'
+        response1 = fc.get_file_properties()
+        response2 = fc.upload_data(data, overwrite=True)
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_bad_audience_file_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_client = self._create_file_and_return_client()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fc = DataLakeFileClient(
+            self.account_url(datalake_storage_account_name, 'dfs'),
+            file_client.file_system_name + '/',
+            '/' + file_client.path_name,
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        data = b'Hello world'
+        with pytest.raises(ClientAuthenticationError):
+            fc.get_file_properties()
+            fc.upload_data(data, overwrite=True)
+
 
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_file_async.py 
new/azure-storage-file-datalake-12.14.0/tests/test_file_async.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_file_async.py    
2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_file_async.py    
2023-11-07 23:29:15.000000000 +0100
@@ -1477,6 +1477,59 @@
         assert file_properties['group'] is not None
         assert file_properties['permissions'] is not None
 
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_storage_account_audience_file_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        await self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_client = await self._create_file_and_return_client()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fc = DataLakeFileClient(
+            self.account_url(datalake_storage_account_name, 'dfs'),
+            file_client.file_system_name + '/',
+            '/' + file_client.path_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        data = b'Hello world'
+        response1 = await fc.get_file_properties()
+        response2 = await fc.upload_data(data, overwrite=True)
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_bad_audience_file_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        await self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        file_client = await self._create_file_and_return_client()
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fc = DataLakeFileClient(
+            self.account_url(datalake_storage_account_name, 'dfs'),
+            file_client.file_system_name + '/',
+            '/' + file_client.path_name,
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        data = b'Hello world'
+        with pytest.raises(ClientAuthenticationError):
+            await fc.get_file_properties()
+            await fc.upload_data(data, overwrite=True)
+
 
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_file_system.py 
new/azure-storage-file-datalake-12.14.0/tests/test_file_system.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_file_system.py   
2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_file_system.py   
2023-11-07 23:29:15.000000000 +0100
@@ -9,7 +9,7 @@
 
 import pytest
 from azure.core import MatchConditions
-from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
+from azure.core.exceptions import ClientAuthenticationError, 
HttpResponseError, ResourceNotFoundError
 from azure.storage.filedatalake import (
     AccessPolicy,
     AccountSasPermissions,
@@ -1071,6 +1071,61 @@
         resp = restored_file_client.get_file_properties()
         assert resp is not None
 
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_storage_account_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        url = self.account_url(datalake_storage_account_name, 'dfs')
+        file_system_name = self._get_file_system_reference()
+        file_system_client = self.dsc.get_file_system_client(file_system_name)
+        file_system_client.create_file_system()
+        file_system_client.create_directory('testdir1')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fsc = FileSystemClient(
+            url, file_system_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = fsc.exists()
+        response2 = fsc.create_directory('testdir11')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy
+    def test_bad_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        url = self.account_url(datalake_storage_account_name, 'dfs')
+        file_system_name = self._get_file_system_reference()
+        file_system_client = self.dsc.get_file_system_client(file_system_name)
+        file_system_client.create_file_system()
+        file_system_client.create_directory('testdir2')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fsc = FileSystemClient(
+            url, file_system_name,
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            fsc.exists()
+            fsc.create_directory('testdir22')
+
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/azure-storage-file-datalake-12.13.1/tests/test_file_system_async.py 
new/azure-storage-file-datalake-12.14.0/tests/test_file_system_async.py
--- old/azure-storage-file-datalake-12.13.1/tests/test_file_system_async.py     
2023-09-13 23:18:55.000000000 +0200
+++ new/azure-storage-file-datalake-12.14.0/tests/test_file_system_async.py     
2023-11-07 23:29:15.000000000 +0100
@@ -11,7 +11,7 @@
 
 import pytest
 from azure.core import MatchConditions
-from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
+from azure.core.exceptions import ClientAuthenticationError, 
HttpResponseError, ResourceNotFoundError
 from azure.storage.filedatalake import (
     AccessPolicy,
     AccountSasPermissions,
@@ -1201,6 +1201,61 @@
         resp = await restored_file_client.get_file_properties()
         assert resp is not None
 
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_storage_account_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        url = self.account_url(datalake_storage_account_name, 'dfs')
+        file_system_name = self._get_file_system_reference()
+        file_system_client = self.dsc.get_file_system_client(file_system_name)
+        await file_system_client.create_file_system()
+        await file_system_client.create_directory('testdir1')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fsc = FileSystemClient(
+            url, file_system_name,
+            credential=token_credential,
+            
audience=f'https://{datalake_storage_account_name}.blob.core.windows.net/'
+        )
+
+        # Assert
+        response1 = await fsc.exists()
+        response2 = await fsc.create_directory('testdir11')
+        assert response1 is not None
+        assert response2 is not None
+
+    @DataLakePreparer()
+    @recorded_by_proxy_async
+    async def test_bad_audience_service_client(self, **kwargs):
+        datalake_storage_account_name = 
kwargs.pop("datalake_storage_account_name")
+        datalake_storage_account_key = 
kwargs.pop("datalake_storage_account_key")
+
+        # Arrange
+        self._setUp(datalake_storage_account_name, 
datalake_storage_account_key)
+        url = self.account_url(datalake_storage_account_name, 'dfs')
+        file_system_name = self._get_file_system_reference()
+        file_system_client = self.dsc.get_file_system_client(file_system_name)
+        await file_system_client.create_file_system()
+        await file_system_client.create_directory('testdir2')
+
+        # Act
+        token_credential = self.generate_oauth_token()
+        fsc = FileSystemClient(
+            url, file_system_name,
+            credential=token_credential,
+            audience=f'https://badaudience.blob.core.windows.net/'
+        )
+
+        # Assert
+        with pytest.raises(ClientAuthenticationError):
+            await fsc.exists()
+            await fsc.create_directory('testdir22')
+
 # 
------------------------------------------------------------------------------
 if __name__ == '__main__':
     unittest.main()
