jedcunningham commented on code in PR #46621:
URL: https://github.com/apache/airflow/pull/46621#discussion_r2101249576


##########
providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py:
##########
@@ -0,0 +1,214 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+log = structlog.get_logger(__name__)
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS.  Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix:  Optional subdirectory within the S3 bucket where the DAGs are stored.
+                    If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded.
+        self.s3_dags_root_dir: Path = self.base_dir.joinpath("s3")
+        # Local path where S3 DAGs are downloaded for current config.
+        self.s3_dags_dir: Path = self.s3_dags_root_dir.joinpath(self.name)

Review Comment:
   ```suggestion
           # Local path where S3 DAGs are downloaded
           self.s3_dags_dir: Path = self.base_dir
   ```
   
   You don't need to double down on the s3 and name levels like this; `base_dir` is already specific to this bundle.



##########
providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py:
##########
@@ -0,0 +1,214 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+log = structlog.get_logger(__name__)
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS.  Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix:  Optional subdirectory within the S3 bucket where the DAGs are stored.
+                    If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded.
+        self.s3_dags_root_dir: Path = self.base_dir.joinpath("s3")
+        # Local path where S3 DAGs are downloaded for current config.
+        self.s3_dags_dir: Path = self.s3_dags_root_dir.joinpath(self.name)
+
+        self._log = log.bind(
+            bundle_name=self.name,
+            version=self.version,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            aws_conn_id=self.aws_conn_id,
+        )
+
+        try:
+            self.s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id)  # Initialize S3 hook.
+        except AirflowException as e:
+            self._log.warning("Could not create S3Hook for connection %s: %s", self.aws_conn_id, e)
+
+    def _initialize(self):
+        with self.lock():
+            if not self.s3_dags_dir.exists():
+                self._log.info("Creating local DAGs directory: %s", self.s3_dags_dir)
+                os.makedirs(self.s3_dags_dir)
+
+            if not self.s3_dags_dir.is_dir():
+                raise AirflowException(f"Local DAGs path: {self.s3_dags_dir} is not a directory.")
+
+            if not self.s3_hook.check_for_bucket(bucket_name=self.bucket_name):
+                raise AirflowException(f"S3 bucket '{self.bucket_name}' does not exist.")
+
+            if self.prefix:
+                # don't check when prefix is ""
+                if not self.s3_hook.check_for_prefix(
+                    bucket_name=self.bucket_name, prefix=self.prefix, delimiter="/"
+                ):
+                    raise AirflowException(
+                        f"S3 prefix 's3://{self.bucket_name}/{self.prefix}' does not exist."
+                    )
+
+            self._download_s3_dags()
+        self.refresh()

Review Comment:
   No point in calling refresh again right away... You just did it in the line above!
   ```suggestion
   ```



##########
providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py:
##########
@@ -0,0 +1,214 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+log = structlog.get_logger(__name__)
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS.  Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix:  Optional subdirectory within the S3 bucket where the DAGs are stored.
+                    If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded.
+        self.s3_dags_root_dir: Path = self.base_dir.joinpath("s3")
+        # Local path where S3 DAGs are downloaded for current config.
+        self.s3_dags_dir: Path = self.s3_dags_root_dir.joinpath(self.name)
+
+        self._log = log.bind(
+            bundle_name=self.name,
+            version=self.version,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            aws_conn_id=self.aws_conn_id,
+        )
+
+        try:
+            self.s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id)  # Initialize S3 hook.
+        except AirflowException as e:
+            self._log.warning("Could not create S3Hook for connection %s: %s", self.aws_conn_id, e)
+
+    def _initialize(self):
+        with self.lock():
+            if not self.s3_dags_dir.exists():
+                self._log.info("Creating local DAGs directory: %s", self.s3_dags_dir)
+                os.makedirs(self.s3_dags_dir)
+
+            if not self.s3_dags_dir.is_dir():
+                raise AirflowException(f"Local DAGs path: {self.s3_dags_dir} is not a directory.")
+
+            if not self.s3_hook.check_for_bucket(bucket_name=self.bucket_name):
+                raise AirflowException(f"S3 bucket '{self.bucket_name}' does not exist.")
+
+            if self.prefix:
+                # don't check when prefix is ""
+                if not self.s3_hook.check_for_prefix(
+                    bucket_name=self.bucket_name, prefix=self.prefix, delimiter="/"
+                ):
+                    raise AirflowException(
+                        f"S3 prefix 's3://{self.bucket_name}/{self.prefix}' does not exist."
+                    )
+
+            self._download_s3_dags()
+        self.refresh()
+
+    def initialize(self) -> None:
+        self._initialize()
+        super().initialize()
+
+    def _delete_stale_local_files(self, current_s3_objects: list[Path]):
+        current_s3_keys = {key for key in current_s3_objects}
+
+        for item in self.s3_dags_dir.iterdir():
+            item: Path  # type: ignore[no-redef]
+            absolute_item_path = item.resolve()
+
+            if absolute_item_path not in current_s3_keys:
+                try:
+                    if item.is_file():
+                        item.unlink(missing_ok=True)
+                        self._log.debug("Deleted stale local file: %s", item)
+                    elif item.is_dir():
+                        # delete only when the folder is empty
+                        if not os.listdir(item):
+                            item.rmdir()
+                            self._log.debug("Deleted stale empty directory: %s", item)
+                    else:
+                        self._log.debug("Skipping stale item of unknown type: %s", item)
+                except OSError as e:
+                    self._log.error("Error deleting stale item %s: %s", item, e)
+                    raise e
+
+    def _download_s3_object_if_changed(self, s3_bucket, s3_object, local_target_path: Path):
+        should_download = False
+        download_msg = ""
+        if not local_target_path.exists():
+            should_download = True
+            download_msg = f"Local file {local_target_path} does not exist."
+        else:
+            local_stats = local_target_path.stat()
+
+            if s3_object.size != local_stats.st_size:
+                should_download = True
+                download_msg = (
+                    f"S3 object size ({s3_object.size}) and local file size 
({local_stats.st_size}) differ."
+                )
+
+            s3_last_modified = s3_object.last_modified
+            if local_stats.st_mtime < s3_last_modified.microsecond:
+                should_download = True
+                download_msg = f"S3 object last modified 
({s3_last_modified.microsecond}) and local file last modified 
({local_stats.st_mtime}) differ."
+
+        if should_download:
+            s3_bucket.download_file(s3_object.key, local_target_path)
+            self._log.debug(
+                "%s Downloaded %s to %s", download_msg, s3_object.key, 
local_target_path.as_posix()
+            )
+        else:
+            self._log.debug(
+                "Local file %s is up-to-date with S3 object %s. Skipping 
download.",
+                local_target_path.as_posix(),
+                s3_object.key,
+            )
+
+    def _download_s3_dags(self):
+        """Download DAG files from the S3 bucket to the local directory."""
+        self._log.debug(
+            "Downloading DAGs from s3://%s/%s to %s", self.bucket_name, 
self.prefix, self.s3_dags_dir
+        )
+        local_s3_objects = []
+        s3_bucket = self.s3_hook.get_bucket(self.bucket_name)
+        for obj in s3_bucket.objects.filter(Prefix=self.prefix):

Review Comment:
   Forgive my s3 ignorance, but is there no higher level construct we can use to bring the local side up to date? Having to iterate through and do our own sync feels a bit odd.
   
   If we do need to do this, it feels like this might be better in the hook - the concept of syncing a directory isn't necessarily s3 bundle specific, but could be more generally useful.
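   
   For what it's worth, boto3 itself doesn't expose a sync primitive (the CLI's `aws s3 sync` is implemented client-side on top of list/download), so some iteration seems unavoidable. If it moves to the hook, a minimal sketch of a generic helper could look like this (`sync_to_local_dir` is a hypothetical method name, and change detection here is simplified to a size comparison):
   
   ```python
   from pathlib import Path
   
   def sync_to_local_dir(self, bucket_name: str, prefix: str, local_dir: Path) -> None:
       """Hypothetical S3Hook method: mirror s3://{bucket_name}/{prefix} into local_dir."""
       bucket = self.get_bucket(bucket_name)
       synced: set[Path] = set()
       for obj in bucket.objects.filter(Prefix=prefix):
           target = local_dir.joinpath(Path(obj.key).relative_to(prefix))
           target.parent.mkdir(parents=True, exist_ok=True)
           # Naive change detection: re-download when sizes differ.
           if not target.exists() or target.stat().st_size != obj.size:
               bucket.download_file(obj.key, target.as_posix())
           synced.add(target.resolve())
       # Delete local files that no longer exist under the prefix.
       for item in local_dir.rglob("*"):
           if item.is_file() and item.resolve() not in synced:
               item.unlink()
   ```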



##########
providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py:
##########
@@ -0,0 +1,214 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+log = structlog.get_logger(__name__)
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS.  Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix:  Optional subdirectory within the S3 bucket where the DAGs are stored.
+                    If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded.
+        self.s3_dags_root_dir: Path = self.base_dir.joinpath("s3")
+        # Local path where S3 DAGs are downloaded for current config.
+        self.s3_dags_dir: Path = self.s3_dags_root_dir.joinpath(self.name)
+
+        self._log = log.bind(
+            bundle_name=self.name,
+            version=self.version,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            aws_conn_id=self.aws_conn_id,
+        )
+
+        try:
+            self.s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id)  # Initialize S3 hook.

Review Comment:
   If you don't need the hook to generate the `view_url`, it would probably make sense to defer creating the hook until `initialize` instead.
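   
   As a rough sketch of the lazy variant (assuming nothing needs the hook before `initialize` runs):
   
   ```python
   from functools import cached_property
   
   @cached_property
   def s3_hook(self) -> S3Hook:
       # Hypothetical: build the hook on first access instead of in __init__,
       # so constructing the bundle never requires a working AWS connection.
       return S3Hook(aws_conn_id=self.aws_conn_id)
   ```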



##########
providers/amazon/src/airflow/providers/amazon/aws/bundles/s3.py:
##########
@@ -0,0 +1,214 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import structlog
+
+log = structlog.get_logger(__name__)
+
+from airflow.dag_processing.bundles.base import BaseDagBundle
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+
+
+class S3DagBundle(BaseDagBundle):
+    """
+    S3 DAG bundle - exposes a directory in S3 as a DAG bundle.
+
+    This allows Airflow to load DAGs directly from an S3 bucket.
+
+    :param aws_conn_id: Airflow connection ID for AWS.  Defaults to AwsBaseHook.default_conn_name.
+    :param bucket_name: The name of the S3 bucket containing the DAG files.
+    :param prefix:  Optional subdirectory within the S3 bucket where the DAGs are stored.
+                    If None, DAGs are assumed to be at the root of the bucket (Optional).
+    """
+
+    supports_versioning = False
+
+    def __init__(
+        self,
+        *,
+        aws_conn_id: str = AwsBaseHook.default_conn_name,
+        bucket_name: str,
+        prefix: str = "",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.aws_conn_id = aws_conn_id
+        self.bucket_name = bucket_name
+        self.prefix = prefix
+        # Local path where S3 DAGs are downloaded.
+        self.s3_dags_root_dir: Path = self.base_dir.joinpath("s3")
+        # Local path where S3 DAGs are downloaded for current config.
+        self.s3_dags_dir: Path = self.s3_dags_root_dir.joinpath(self.name)
+
+        self._log = log.bind(
+            bundle_name=self.name,
+            version=self.version,
+            bucket_name=self.bucket_name,
+            prefix=self.prefix,
+            aws_conn_id=self.aws_conn_id,
+        )
+
+        try:
+            self.s3_hook: S3Hook = S3Hook(aws_conn_id=self.aws_conn_id)  # Initialize S3 hook.
+        except AirflowException as e:
+            self._log.warning("Could not create S3Hook for connection %s: %s", self.aws_conn_id, e)
+
+    def _initialize(self):
+        with self.lock():
+            if not self.s3_dags_dir.exists():
+                self._log.info("Creating local DAGs directory: %s", self.s3_dags_dir)
+                os.makedirs(self.s3_dags_dir)
+
+            if not self.s3_dags_dir.is_dir():
+                raise AirflowException(f"Local DAGs path: {self.s3_dags_dir} is not a directory.")
+
+            if not self.s3_hook.check_for_bucket(bucket_name=self.bucket_name):
+                raise AirflowException(f"S3 bucket '{self.bucket_name}' does not exist.")
+
+            if self.prefix:
+                # don't check when prefix is ""
+                if not self.s3_hook.check_for_prefix(
+                    bucket_name=self.bucket_name, prefix=self.prefix, delimiter="/"
+                ):
+                    raise AirflowException(
+                        f"S3 prefix 's3://{self.bucket_name}/{self.prefix}' does not exist."
+                    )
+
+            self._download_s3_dags()
+        self.refresh()
+
+    def initialize(self) -> None:
+        self._initialize()
+        super().initialize()
+
+    def _delete_stale_local_files(self, current_s3_objects: list[Path]):
+        current_s3_keys = {key for key in current_s3_objects}
+
+        for item in self.s3_dags_dir.iterdir():
+            item: Path  # type: ignore[no-redef]
+            absolute_item_path = item.resolve()
+
+            if absolute_item_path not in current_s3_keys:
+                try:
+                    if item.is_file():
+                        item.unlink(missing_ok=True)
+                        self._log.debug("Deleted stale local file: %s", item)
+                    elif item.is_dir():
+                        # delete only when the folder is empty
+                        if not os.listdir(item):
+                            item.rmdir()
+                            self._log.debug("Deleted stale empty directory: %s", item)
+                    else:
+                        self._log.debug("Skipping stale item of unknown type: %s", item)
+                except OSError as e:
+                    self._log.error("Error deleting stale item %s: %s", item, e)
+                    raise e
+
+    def _download_s3_object_if_changed(self, s3_bucket, s3_object, local_target_path: Path):
+        should_download = False
+        download_msg = ""
+        if not local_target_path.exists():
+            should_download = True
+            download_msg = f"Local file {local_target_path} does not exist."
+        else:
+            local_stats = local_target_path.stat()
+
+            if s3_object.size != local_stats.st_size:
+                should_download = True
+                download_msg = (
+                    f"S3 object size ({s3_object.size}) and local file size 
({local_stats.st_size}) differ."
+                )
+
+            s3_last_modified = s3_object.last_modified
+            if local_stats.st_mtime < s3_last_modified.microsecond:
+                should_download = True
+                download_msg = f"S3 object last modified 
({s3_last_modified.microsecond}) and local file last modified 
({local_stats.st_mtime}) differ."
+
+        if should_download:
+            s3_bucket.download_file(s3_object.key, local_target_path)
+            self._log.debug(
+                "%s Downloaded %s to %s", download_msg, s3_object.key, 
local_target_path.as_posix()
+            )
+        else:
+            self._log.debug(
+                "Local file %s is up-to-date with S3 object %s. Skipping 
download.",
+                local_target_path.as_posix(),
+                s3_object.key,
+            )
+
+    def _download_s3_dags(self):
+        """Download DAG files from the S3 bucket to the local directory."""
+        self._log.debug(
+            "Downloading DAGs from s3://%s/%s to %s", self.bucket_name, 
self.prefix, self.s3_dags_dir
+        )
+        local_s3_objects = []
+        s3_bucket = self.s3_hook.get_bucket(self.bucket_name)
+        for obj in s3_bucket.objects.filter(Prefix=self.prefix):
+            obj_path = Path(obj.key)
+            local_target_path = self.s3_dags_dir.joinpath(obj_path.relative_to(self.prefix))
+            if not local_target_path.parent.exists():
+                local_target_path.parent.mkdir(parents=True, exist_ok=True)
+                self._log.debug("Created local directory: %s", 
local_target_path.parent)
+            self._download_s3_object_if_changed(
+                s3_bucket=s3_bucket, s3_object=obj, local_target_path=local_target_path
+            )
+            local_s3_objects.append(local_target_path)
+
+        self._delete_stale_local_files(current_s3_objects=local_s3_objects)
+
+    def __repr__(self):
+        return (
+            f"<S3DagBundle("
+            f"name={self.name!r}, "
+            f"bucket_name={self.bucket_name!r}, "
+            f"prefix={self.prefix!r}, "
+            f"version={self.version!r}"
+            f")>"
+        )
+
+    def get_current_version(self) -> str | None:
+        """Return the current version of the DAG bundle. Currently not 
supported."""
+        return None
+
+    @property
+    def path(self) -> Path:
+        """Return the local path to the DAG files."""
+        return self.s3_dags_dir  # Path where DAGs are downloaded.
+
+    def refresh(self) -> None:
+        """Refresh the DAG bundle by re-downloading the DAGs from S3."""
+        if self.version:
+            raise AirflowException("Refreshing a specific version is not 
supported")
+
+        with self.lock():
+            self._download_s3_dags()
+
+    def view_url(self, version: str | None = None) -> str | None:
+        """Return a URL for viewing the DAGs in S3. Currently, versioning is 
not supported."""
+        if self.version:
+            raise AirflowException("S3 url with version is not supported")
+
+        presigned_url = self.s3_hook.generate_presigned_url(

Review Comment:
   The intent is for linking out from the UI - we probably don't want a signed url from the hook for this purpose? But instead just link out and let the UI user's credentials take over, if they have access.
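   
   Something like this, as a minimal sketch (assuming the standard S3 console URL pattern; region handling omitted):
   
   ```python
   def view_url(self, version: str | None = None) -> str | None:
       # Hypothetical: a plain, unsigned console link - whether the viewer can
       # actually see the objects is decided by their own AWS credentials.
       return f"https://console.aws.amazon.com/s3/buckets/{self.bucket_name}?prefix={self.prefix}"
   ```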



##########
providers/amazon/tests/unit/amazon/aws/bundles/test_s3.py:
##########
@@ -0,0 +1,293 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+import os
+import re
+
+import boto3
+import pytest
+from moto import mock_aws
+
+import airflow.version
+from airflow.exceptions import AirflowException
+from airflow.models import Connection
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.utils import db
+
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_connections
+
+AWS_CONN_ID_WITH_REGION = "s3_dags_connection"
+AWS_CONN_ID_REGION = "eu-central-1"
+AWS_CONN_ID_DEFAULT = "aws_default"
+S3_BUCKET_NAME = "my-airflow-dags-bucket"
+S3_BUCKET_PREFIX = "project1/dags"
+
+if airflow.version.version.strip().startswith("3"):
+    from airflow.providers.amazon.aws.bundles.s3 import S3DagBundle
+
+
[email protected]
+def mocked_s3_resource():
+    with mock_aws():
+        yield boto3.resource("s3")
+
+
[email protected]
+def s3_client():
+    with mock_aws():
+        yield boto3.client("s3")
+
+
[email protected]
+def s3_bucket(mocked_s3_resource, s3_client):
+    bucket = mocked_s3_resource.create_bucket(Bucket=S3_BUCKET_NAME)
+
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_01.py", Body=b"test data")
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_02.py", Body=b"test data")
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_a.py", Body=b"test data"
+    )
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_b.py", Body=b"test data"
+    )
+
+    return bucket
+
+
[email protected](autouse=True)
+def bundle_temp_dir(tmp_path):
+    with conf_vars({("dag_processor", "dag_bundle_storage_path"): str(tmp_path)}):
+        yield tmp_path
+
+
[email protected](not airflow.version.version.strip().startswith("3"), reason="Airflow >=3.0.0 test")
+class TestS3DagBundle:
+    @classmethod
+    def teardown_class(cls) -> None:
+        clear_db_connections()
+
+    @classmethod
+    def setup_class(cls) -> None:
+        db.merge_conn(
+            Connection(
+                conn_id=AWS_CONN_ID_DEFAULT,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                },
+            )
+        )
+        db.merge_conn(
+            conn=Connection(
+                conn_id=AWS_CONN_ID_WITH_REGION,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                    "region_name": AWS_CONN_ID_REGION,
+                },
+            )
+        )
+
+    @pytest.mark.db_test
+    def test_view_url_generates_presigned_url(self):
+        bundle = S3DagBundle(
+            name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, 
prefix="project1/dags", bucket_name=S3_BUCKET_NAME
+        )
+        url: str = bundle.view_url("test_version")
+        assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
+        assert "AWSAccessKeyId=" in url
+        assert "Signature=" in url
+        assert "Expires=" in url

Review Comment:
   Yeah, these are exactly what I would _not_ expect to see in the urls we give out to folks in the UI.
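   
   i.e. the assertion would look closer to this sketch (mirroring the unsigned console-link idea above; the exact URL format is an assumption):
   
   ```python
   assert url == f"https://console.aws.amazon.com/s3/buckets/{S3_BUCKET_NAME}?prefix={S3_BUCKET_PREFIX}"
   ```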



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
