vincbeck commented on code in PR #31018:
URL: https://github.com/apache/airflow/pull/31018#discussion_r1197888577
##########
airflow/providers/amazon/aws/hooks/s3.py:
##########
@@ -1121,3 +1127,371 @@ def delete_bucket_tagging(self, bucket_name: str | None = None) -> None:
"""
s3_client = self.get_conn()
s3_client.delete_bucket_tagging(Bucket=bucket_name)
+
+
+def provide_bucket_name_async(func: T) -> T:
+ """
+ Function decorator that provides a bucket name taken from the connection
+ in case no bucket name has been passed to the function.
+ """
+ function_signature = signature(func)
+
+ @wraps(func)
+ async def wrapper(*args: Any, **kwargs: Any) -> Any:
+ bound_args = function_signature.bind(*args, **kwargs)
+
+ if "bucket_name" not in bound_args.arguments:
+ self = args[0]
+ if self.aws_conn_id:
+                connection = await sync_to_async(self.get_connection)(self.aws_conn_id)
+ if connection.schema:
+ bound_args.arguments["bucket_name"] = connection.schema
+
+ return await func(*bound_args.args, **bound_args.kwargs)
+
+ return cast(T, wrapper)
+
+
+class S3AsyncHook(AwsBaseAsyncHook):
Review Comment:
I think we decided on not having separate hooks for sync and async operators.
You can find many examples at [closed PRs](https://github.com/apache/airflow/pulls?q=is%3Apr+deferrable+is%3Aclosed+label%3Aprovider%3AAWS). If you want one specific example, you can take a look at [this one](https://github.com/apache/airflow/pull/31130).
In your example, you would need to put `get_head_object` in the existing S3 hook (and adding an `_async` suffix would be nice). The same applies to the other methods in your hook.
I'll take a look deeper when these changes are done.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]