pankajastro commented on code in PR #28850:
URL: https://github.com/apache/airflow/pull/28850#discussion_r1122772845


##########
airflow/providers/amazon/aws/hooks/base_aws.py:
##########
@@ -865,3 +868,137 @@ def _parse_s3_config(config_file_name: str, 
config_format: str | None = "boto",
         config_format=config_format,
         profile=profile,
     )
+
+
+try:
+    import aiobotocore.credentials
+    from aiobotocore.session import AioSession, get_session
+except ImportError:
+    pass
+
+
+class BaseAsyncSessionFactory(BaseSessionFactory):
+    """
+    Base AWS Session Factory class to handle aiobotocore session creation.
+
+    It currently, handles ENV, AWS secret key and STS client method 
``assume_role``
+    provided in Airflow connection
+    """
+
+    async def get_role_credentials(self) -> dict:
+        """Get the role_arn, method credentials from connection details and 
get the role credentials detail"""
+        async with self._basic_session.create_client("sts", 
region_name=self.region_name) as client:
+            response = await client.assume_role(
+                RoleArn=self.role_arn,
+                
RoleSessionName=self._strip_invalid_session_name_characters(f"Airflow_{self.conn.conn_id}"),
+                **self.conn.assume_role_kwargs,
+            )
+            return response["Credentials"]
+
+    async def _get_refresh_credentials(self) -> dict[str, Any]:
+        self.log.debug("Refreshing credentials")
+        assume_role_method = self.conn.assume_role_method
+        if assume_role_method != "assume_role":
+            raise 
NotImplementedError(f"assume_role_method={assume_role_method} not expected")
+
+        credentials = await self.get_role_credentials()
+
+        expiry_time = credentials["Expiration"].isoformat()
+        self.log.debug("New credentials expiry_time: %s", expiry_time)
+        credentials = {
+            "access_key": credentials.get("AccessKeyId"),
+            "secret_key": credentials.get("SecretAccessKey"),
+            "token": credentials.get("SessionToken"),
+            "expiry_time": expiry_time,
+        }
+        return credentials
+
+    def _get_session_with_assume_role(self) -> AioSession:
+
+        assume_role_method = self.conn.assume_role_method
+        if assume_role_method != "assume_role":
+            raise 
NotImplementedError(f"assume_role_method={assume_role_method} not expected")
+
+        credentials = 
aiobotocore.credentials.AioRefreshableCredentials.create_from_metadata(
+            metadata=self._get_refresh_credentials(),
+            refresh_using=self._get_refresh_credentials,
+            method="sts-assume-role",
+        )
+
+        session = aiobotocore.session.get_session()
+        session._credentials = credentials
+        return session
+
+    @cached_property
+    def _basic_session(self) -> AioSession:
+        """Cached property with basic aiobotocore.session.AioSession."""
+        session_kwargs = self.conn.session_kwargs
+        aws_access_key_id = session_kwargs.get("aws_access_key_id")
+        aws_secret_access_key = session_kwargs.get("aws_secret_access_key")
+        aws_session_token = session_kwargs.get("aws_session_token")
+        region_name = session_kwargs.get("region_name")
+        profile_name = session_kwargs.get("profile_name")
+
+        aio_session = get_session()
+        if profile_name is not None:
+            aio_session.set_config_variable("profile", profile_name)
+        if aws_access_key_id or aws_secret_access_key or aws_session_token:
+            aio_session.set_credentials(
+                access_key=aws_access_key_id,
+                secret_key=aws_secret_access_key,
+                token=aws_session_token,
+            )
+        if region_name is not None:
+            aio_session.set_config_variable("region", region_name)
+        return aio_session
+
+    def create_session(self) -> AioSession:
+        """Create aiobotocore Session from connection and config."""
+        if not self._conn:
+            self.log.info("No connection ID provided. Fallback on boto3 
credential strategy")
+            return get_session()
+        elif not self.role_arn:
+            return self._basic_session
+        return self._get_session_with_assume_role()
+
+
+class AwsBaseAsyncHook(AwsBaseHook):
+    """
+    Interacts with AWS using aiobotocore asynchronously.
+
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is None or empty then the default botocore behaviour is used. 
If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default botocore configuration would be used (and must be
+        maintained on each worker node).
+    :param verify: Whether to verify SSL certificates.
+    :param region_name: AWS region_name. If not specified then the default 
boto3 behaviour is used.
+    :param client_type: boto3.client client_type. Eg 's3', 'emr' etc
+    :param resource_type: boto3.resource resource_type. Eg 'dynamodb' etc
+    :param config: Configuration for botocore client.
+    """
+
+    def __init__(self, **kwargs) -> None:
+        try:
+            pass
+        except ImportError:
+            raise AirflowOptionalProviderFeatureException(
+                "AWS deferrable operator feature is disable. To enable it 
please install aiobotocore>=2.1.1"
+            )

Review Comment:
   I wanted it to fail during DAG parsing only if aiobotocore is not installed, but 
it looks like pre-commit replaced the import with `pass`.



##########
airflow/providers/amazon/provider.yaml:
##########
@@ -596,3 +597,4 @@ additional-extras:
   - name: pandas
     dependencies:
       - pandas>=0.17.1
+      - aiobotocore>=2.1.1

Review Comment:
   yup



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to