This is an automated email from the ASF dual-hosted git repository.
vincbeck pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new f6341440218 Add bucket_namespace support to `S3Hook.create_bucket` and `S3CreateBucketOperator` (#63622)
f6341440218 is described below
commit f6341440218fec57fb00dbcc0f29d6a52ba8a3df
Author: Noritaka Sekiyama <[email protected]>
AuthorDate: Mon Mar 16 22:34:46 2026 +0800
Add bucket_namespace support to `S3Hook.create_bucket` and `S3CreateBucketOperator` (#63622)
---
.../src/airflow/providers/amazon/aws/hooks/s3.py | 25 +++++++++++++------
.../airflow/providers/amazon/aws/operators/s3.py | 13 ++++++++--
.../amazon/tests/unit/amazon/aws/hooks/test_s3.py | 29 ++++++++++++++++++++++
.../tests/unit/amazon/aws/operators/test_s3.py | 20 ++++++++++++++-
4 files changed, 76 insertions(+), 11 deletions(-)
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py
index d0f8ce8b784..f999bd4f165 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py
@@ -338,7 +338,12 @@ class S3Hook(AwsBaseHook):
return self.resource.Bucket(bucket_name)
@provide_bucket_name
- def create_bucket(self, bucket_name: str | None = None, region_name: str | None = None) -> None:
+ def create_bucket(
+ self,
+ bucket_name: str | None = None,
+ region_name: str | None = None,
+ bucket_namespace: str | None = None,
+ ) -> None:
"""
Create an Amazon S3 bucket.
@@ -347,6 +352,10 @@ class S3Hook(AwsBaseHook):
:param bucket_name: The name of the bucket
:param region_name: The name of the aws region in which to create the bucket.
+ :param bucket_namespace: The namespace of the bucket. Set to ``account-regional`` to create
+ the bucket in the account-regional namespace. If not specified, the bucket is created
+ in the global namespace. See:
+ https://docs.aws.amazon.com/AmazonS3/latest/userguide/gpbucketnamespaces.html
"""
if not region_name:
if self.conn_region_name == "aws-global":
@@ -356,13 +365,13 @@ class S3Hook(AwsBaseHook):
)
region_name = self.conn_region_name
- if region_name == "us-east-1":
- self.get_conn().create_bucket(Bucket=bucket_name)
- else:
- self.get_conn().create_bucket(
- Bucket=bucket_name,
- CreateBucketConfiguration={"LocationConstraint": region_name},
- )
+ kwargs: dict[str, Any] = {"Bucket": bucket_name}
+ if region_name != "us-east-1":
+ kwargs["CreateBucketConfiguration"] = {"LocationConstraint": region_name}
+ if bucket_namespace:
+ kwargs["BucketNamespace"] = bucket_namespace
+
+ self.get_conn().create_bucket(**kwargs)
@provide_bucket_name
def check_for_prefix(self, prefix: str, delimiter: str, bucket_name: str | None = None) -> bool:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py
index f4582dc6ba6..50c1ac07983 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/s3.py
@@ -57,27 +57,36 @@ class S3CreateBucketOperator(AwsBaseOperator[S3Hook]):
empty, then default boto3 configuration would be used (and must be
maintained on each worker node).
:param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+ :param bucket_namespace: The namespace of the bucket. Set to ``account-regional`` to create
+ the bucket in the account-regional namespace. If not specified, the bucket is created
+ in the global namespace.
:param verify: Whether or not to verify SSL certificates. See:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
:param botocore_config: Configuration dictionary (key-values) for botocore client. See:
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
"""
- template_fields: Sequence[str] = aws_template_fields("bucket_name")
+ template_fields: Sequence[str] = aws_template_fields("bucket_name", "bucket_namespace")
aws_hook_class = S3Hook
def __init__(
self,
*,
bucket_name: str,
+ bucket_namespace: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.bucket_name = bucket_name
+ self.bucket_namespace = bucket_namespace
def execute(self, context: Context):
if not self.hook.check_for_bucket(self.bucket_name):
- self.hook.create_bucket(bucket_name=self.bucket_name, region_name=self.region_name)
+ self.hook.create_bucket(
+ bucket_name=self.bucket_name,
+ region_name=self.region_name,
+ bucket_namespace=self.bucket_namespace,
+ )
self.log.info("Created bucket with name: %s", self.bucket_name)
else:
self.log.info("Bucket with name: %s already exists", self.bucket_name)
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
index 99fd45aff89..eea053fc52b 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
@@ -244,6 +244,35 @@ class TestAwsS3Hook:
region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get("LocationConstraint")
assert region == (region_name if region_name != "us-east-1" else None)
+ @mock_aws
+ def test_create_bucket_account_regional_namespace(self):
+ hook = S3Hook()
+ with mock.patch.object(hook, "get_conn") as mock_conn:
+ hook.create_bucket(
+ bucket_name="new_bucket",
+ region_name="us-east-2",
+ bucket_namespace="account-regional",
+ )
+ mock_conn().create_bucket.assert_called_once_with(
+ Bucket="new_bucket",
+ CreateBucketConfiguration={"LocationConstraint": "us-east-2"},
+ BucketNamespace="account-regional",
+ )
+
+ @mock_aws
+ def test_create_bucket_account_regional_namespace_us_east_1(self):
+ hook = S3Hook()
+ with mock.patch.object(hook, "get_conn") as mock_conn:
+ hook.create_bucket(
+ bucket_name="new_bucket",
+ region_name="us-east-1",
+ bucket_namespace="account-regional",
+ )
+ mock_conn().create_bucket.assert_called_once_with(
+ Bucket="new_bucket",
+ BucketNamespace="account-regional",
+ )
+
def test_create_bucket_no_region_regional_endpoint(self, monkeypatch):
conn = Connection(
conn_id="no-region-regional-endpoint",
diff --git a/providers/amazon/tests/unit/amazon/aws/operators/test_s3.py b/providers/amazon/tests/unit/amazon/aws/operators/test_s3.py
index e9f7204b71b..a7501ee4bbf 100644
--- a/providers/amazon/tests/unit/amazon/aws/operators/test_s3.py
+++ b/providers/amazon/tests/unit/amazon/aws/operators/test_s3.py
@@ -98,7 +98,25 @@ class TestS3CreateBucketOperator:
# execute s3 bucket create operator
self.create_bucket_operator.execute({})
mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
- mock_create_bucket.assert_called_once_with(bucket_name=BUCKET_NAME, region_name=None)
+ mock_create_bucket.assert_called_once_with(
+ bucket_name=BUCKET_NAME, region_name=None, bucket_namespace=None
+ )
+
+ @mock_aws
+ @mock.patch.object(S3Hook, "create_bucket")
+ @mock.patch.object(S3Hook, "check_for_bucket")
+ def test_execute_with_bucket_namespace(self, mock_check_for_bucket, mock_create_bucket):
+ mock_check_for_bucket.return_value = False
+ operator = S3CreateBucketOperator(
+ task_id="test-s3-create-bucket-with-namespace",
+ bucket_name=BUCKET_NAME,
+ bucket_namespace="account-regional",
+ )
+ operator.execute({})
+ mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
+ mock_create_bucket.assert_called_once_with(
+ bucket_name=BUCKET_NAME, region_name=None, bucket_namespace="account-regional"
+ )
def test_template_fields(self):
validate_template_fields(self.create_bucket_operator)