jedcunningham commented on code in PR #46621:
URL: https://github.com/apache/airflow/pull/46621#discussion_r2124893126


##########
providers/amazon/src/airflow/providers/amazon/aws/hooks/s3.py:
##########
@@ -1616,3 +1616,80 @@ def delete_bucket_tagging(self, bucket_name: str | None = None) -> None:
         """
         s3_client = self.get_conn()
         s3_client.delete_bucket_tagging(Bucket=bucket_name)
+
+    def _download_s3_delete_stale_local_files(self, current_s3_objects: list[Path], local_dir: Path):
+        current_s3_keys = {key for key in current_s3_objects}
+
+        for item in local_dir.iterdir():
+            item: Path  # type: ignore[no-redef]
+            absolute_item_path = item.resolve()
+
+            if absolute_item_path not in current_s3_keys:
+                try:
+                    if item.is_file():
+                        item.unlink(missing_ok=True)
+                        self.log.debug("Deleted stale local file: %s", item)
+                    elif item.is_dir():
+                        # delete only when the folder is empty
+                        if not os.listdir(item):
+                            item.rmdir()
+                            self.log.debug("Deleted stale empty directory: %s", item)
+                    else:
+                        self.log.debug("Skipping stale item of unknown type: %s", item)
+                except OSError as e:
+                    self.log.error("Error deleting stale item %s: %s", item, e)
+                    raise e
+
+    def _download_s3_object_if_changed(self, s3_bucket, s3_object, local_target_path: Path):
+        should_download = False
+        download_msg = ""
+        if not local_target_path.exists():
+            should_download = True
+            download_msg = f"Local file {local_target_path} does not exist."
+        else:
+            local_stats = local_target_path.stat()
+
+            if s3_object.size != local_stats.st_size:
+                should_download = True
+                download_msg = (
+                    f"S3 object size ({s3_object.size}) and local file size ({local_stats.st_size}) differ."
+                )
+
+            s3_last_modified = s3_object.last_modified
+            if local_stats.st_mtime < s3_last_modified.microsecond:
+                should_download = True
+                download_msg = f"S3 object last modified ({s3_last_modified.microsecond}) and local file last modified ({local_stats.st_mtime}) differ."
+
+        if should_download:
+            s3_bucket.download_file(s3_object.key, local_target_path)
+            self.log.debug(
+                "%s Downloaded %s to %s", download_msg, s3_object.key, local_target_path.as_posix()
+            )
+        else:
+            self.log.debug(
+                "Local file %s is up-to-date with S3 object %s. Skipping download.",
+                local_target_path.as_posix(),
+                s3_object.key,
+            )
+
+    def download_s3(self, bucket_name: str, local_dir: Path, s3_prefix="", delete_stale: bool = True):

Review Comment:
   Might be better to name this something like "sync_to_local_dir"?
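   
   For illustration only, a minimal sketch of how the renamed method could read at a call site (the name `sync_to_local_dir` and the argument values below are assumptions that simply mirror the current `download_s3` signature in this PR):
   
   ```python
   from pathlib import Path

   from airflow.providers.amazon.aws.hooks.s3 import S3Hook

   # Hypothetical usage after the suggested rename; the method body would stay
   # the same as the current download_s3 implementation in this PR.
   hook = S3Hook(aws_conn_id="aws_default")
   hook.sync_to_local_dir(
       bucket_name="my-airflow-dags-bucket",
       local_dir=Path("/tmp/dags"),
       s3_prefix="project1/dags",
       delete_stale=True,
   )
   ```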



##########
providers/amazon/tests/unit/amazon/aws/bundles/test_s3.py:
##########
@@ -0,0 +1,305 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+import os
+import re
+
+import boto3
+import pytest
+from moto import mock_aws
+
+import airflow.version
+from airflow.exceptions import AirflowException
+from airflow.models import Connection
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.utils import db
+
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_connections
+
+AWS_CONN_ID_WITH_REGION = "s3_dags_connection"
+AWS_CONN_ID_REGION = "eu-central-1"
+AWS_CONN_ID_DEFAULT = "aws_default"
+S3_BUCKET_NAME = "my-airflow-dags-bucket"
+S3_BUCKET_PREFIX = "project1/dags"
+
+if airflow.version.version.strip().startswith("3"):
+    from airflow.providers.amazon.aws.bundles.s3 import S3DagBundle
+
+
[email protected]
+def mocked_s3_resource():
+    with mock_aws():
+        yield boto3.resource("s3")
+
+
[email protected]
+def s3_client():
+    with mock_aws():
+        yield boto3.client("s3")
+
+
[email protected]
+def s3_bucket(mocked_s3_resource, s3_client):
+    bucket = mocked_s3_resource.create_bucket(Bucket=S3_BUCKET_NAME)
+
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_01.py", Body=b"test data")
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_02.py", Body=b"test data")
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_a.py", Body=b"test data"
+    )
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_b.py", Body=b"test data"
+    )
+
+    return bucket
+
+
[email protected](autouse=True)
+def bundle_temp_dir(tmp_path):
+    with conf_vars({("dag_processor", "dag_bundle_storage_path"): str(tmp_path)}):
+        yield tmp_path
+
+
[email protected](not airflow.version.version.strip().startswith("3"), 
reason="Airflow >=3.0.0 test")
+class TestS3DagBundle:
+    @classmethod
+    def teardown_class(cls) -> None:
+        clear_db_connections()
+
+    @classmethod
+    def setup_class(cls) -> None:
+        db.merge_conn(
+            Connection(
+                conn_id=AWS_CONN_ID_DEFAULT,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                },
+            )
+        )
+        db.merge_conn(
+            conn=Connection(
+                conn_id=AWS_CONN_ID_WITH_REGION,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                    "region_name": AWS_CONN_ID_REGION,
+                },
+            )
+        )
+
+    @pytest.mark.db_test
+    def test_view_url_generates_presigned_url(self):
+        bundle = S3DagBundle(
+            name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
+        )
+        url: str = bundle.view_url("test_version")
+        assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
+
+    @pytest.mark.db_test
+    def test_supports_versioning(self):
+        bundle = S3DagBundle(
+            name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
+        )
+        assert S3DagBundle.supports_versioning is False
+
+        # set version, it's not supported
+        bundle.version = "test_version"
+
+        with pytest.raises(AirflowException, match="Refreshing a specific version is not supported"):
+            bundle.refresh()
+        with pytest.raises(AirflowException, match="S3 url with version is not supported"):
+            bundle.view_url("test_version")
+
+    @pytest.mark.db_test
+    def test_correct_bundle_path_used(self):
+        bundle = S3DagBundle(
+            name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1_dags", bucket_name="aiflow_dags"
+        )
+        assert str(bundle.base_dir) == str(bundle.s3_dags_dir)
+
+    @pytest.mark.db_test
+    def test_s3_bucket_and_prefix_validated(self, s3_bucket):
+        hook = S3Hook(aws_conn_id=AWS_CONN_ID_DEFAULT)
+        assert hook.check_for_bucket(s3_bucket.name) is True
+
+        bundle = S3DagBundle(
+            name="test",
+            aws_conn_id=AWS_CONN_ID_WITH_REGION,
+            prefix="project1_dags",
+            bucket_name="non-existing-bucket",
+        )
+        with pytest.raises(AirflowException, match="S3 bucket.*non-existing-bucket.*does not exist.*"):
+            bundle.initialize()
+
+        bundle = S3DagBundle(
+            name="test",
+            aws_conn_id=AWS_CONN_ID_WITH_REGION,
+            prefix="non-existing-prefix",
+            bucket_name=S3_BUCKET_NAME,
+        )
+        with pytest.raises(AirflowException, match="S3 prefix.*non-existing-prefix.*does not exist.*"):
+            bundle.initialize()
+
+        bundle = S3DagBundle(
+            name="test",
+            aws_conn_id=AWS_CONN_ID_WITH_REGION,
+            prefix=S3_BUCKET_PREFIX,
+            bucket_name=S3_BUCKET_NAME,
+        )
+        # initialize succeeds, with correct prefix and bucket
+        bundle.initialize()
+        assert bundle.s3_hook.region_name == AWS_CONN_ID_REGION
+
+        bundle = S3DagBundle(
+            name="test",
+            aws_conn_id=AWS_CONN_ID_WITH_REGION,
+            prefix="",
+            bucket_name=S3_BUCKET_NAME,
+        )
+        # initialize succeeds, with empty prefix
+        bundle.initialize()
+        assert bundle.s3_hook.region_name == AWS_CONN_ID_REGION
+
+    def _upload_fixtures(self, bucket: str, fixtures_dir: str) -> None:
+        client = boto3.client("s3")
+        fixtures_paths = [
+            os.path.join(path, filename) for path, _, files in os.walk(fixtures_dir) for filename in files
+        ]
+        for path in fixtures_paths:
+            key = os.path.relpath(path, fixtures_dir)
+            client.upload_file(Filename=path, Bucket=bucket, Key=key)
+
+    @pytest.mark.db_test
+    def test_refresh(self, s3_bucket, s3_client, caplog, cap_structlog):
+        caplog.set_level(logging.ERROR)
+        caplog.set_level(logging.DEBUG, logger="airflow.providers.amazon.aws.bundles.s3.S3DagBundle")
+        caplog.set_level(
+            logging.DEBUG, logger="airflow.task.hooks.airflow.providers.amazon.aws.hooks.s3.S3Hook"
+        )
+        bundle = S3DagBundle(
+            name="test",
+            aws_conn_id=AWS_CONN_ID_WITH_REGION,
+            prefix=S3_BUCKET_PREFIX,
+            bucket_name=S3_BUCKET_NAME,
+        )
+
+        bundle.initialize()
+        # dags are downloaded once by initialize and once with refresh called post initialize
+        assert cap_structlog.text.count("Downloading DAGs from s3") == 1
+        self.assert_log_matches_regex(
+            caplog=caplog,
+            level="DEBUG",
+            regex=rf"Downloaded.*{S3_BUCKET_PREFIX}.*subproject1/dag_a.py.*{bundle.s3_dags_dir.as_posix()}.*subproject1/dag_a.py.*",
+        )
+
+        s3_client.put_object(Bucket=s3_bucket.name, Key=S3_BUCKET_PREFIX + "/dag_03.py", Body=b"test data")
+        bundle.refresh()
+        assert cap_structlog.text.count("Downloading DAGs from s3") == 2
+        self.assert_log_matches_regex(
+            caplog=caplog,
+            level="DEBUG",
+            regex=rf"Local file.*/{bundle.name}/subproject1/dag_a.py.*is up-to-date with S3 object.*{S3_BUCKET_PREFIX}.*subproject1/dag_a.py.*",
+        )
+        self.assert_log_matches_regex(
+            caplog=caplog,
+            level="DEBUG",
+            regex=rf"Downloaded.*{S3_BUCKET_PREFIX}.*dag_03.py.*/{bundle.name}/dag_03.py",
+        )
+        assert bundle.s3_dags_dir.joinpath("dag_03.py").read_text() == "test data"
+        bundle.s3_dags_dir.joinpath("dag_should_be_deleted.py").write_text("test dag")
+        bundle.s3_dags_dir.joinpath("dag_should_be_deleted_folder").mkdir(exist_ok=True)
+        s3_client.put_object(

Review Comment:
   We should probably refactor this so the "syncing" logic that moved to the hook is tested in the hook tests instead.
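   
   For example, a rough sketch of what a hook-level test could look like (the target module, the reuse of moto-backed fixtures like `s3_bucket`/`s3_client`, and the exact local layout produced by `download_s3` are all assumptions based on this PR, not the final layout):
   
   ```python
   # Hypothetically placed in the S3 hook test module, reusing moto-backed
   # s3_bucket/s3_client fixtures equivalent to the ones defined above.
   from airflow.providers.amazon.aws.hooks.s3 import S3Hook


   def test_download_s3_syncs_and_removes_stale_files(s3_bucket, s3_client, tmp_path):
       hook = S3Hook(aws_conn_id="aws_default")

       # First sync pulls objects under the prefix into the local directory
       # (assumes keys map to paths relative to the prefix, as the bundle test expects).
       hook.download_s3(bucket_name=s3_bucket.name, local_dir=tmp_path, s3_prefix="project1/dags")
       assert (tmp_path / "dag_01.py").read_text() == "test data"

       # A local file with no matching S3 object should be removed by the next
       # sync while delete_stale is left at its default of True.
       stale = tmp_path / "stale_dag.py"
       stale.write_text("stale")
       hook.download_s3(bucket_name=s3_bucket.name, local_dir=tmp_path, s3_prefix="project1/dags")
       assert not stale.exists()
   ```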



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
