ismailsimsek commented on code in PR #46621:
URL: https://github.com/apache/airflow/pull/46621#discussion_r2104649941
##########
providers/amazon/tests/unit/amazon/aws/bundles/test_s3.py:
##########

@@ -0,0 +1,293 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+import os
+import re
+
+import boto3
+import pytest
+from moto import mock_aws
+
+import airflow.version
+from airflow.exceptions import AirflowException
+from airflow.models import Connection
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.utils import db
+
+from tests_common.test_utils.config import conf_vars
+from tests_common.test_utils.db import clear_db_connections
+
+AWS_CONN_ID_WITH_REGION = "s3_dags_connection"
+AWS_CONN_ID_REGION = "eu-central-1"
+AWS_CONN_ID_DEFAULT = "aws_default"
+S3_BUCKET_NAME = "my-airflow-dags-bucket"
+S3_BUCKET_PREFIX = "project1/dags"
+
+if airflow.version.version.strip().startswith("3"):
+    from airflow.providers.amazon.aws.bundles.s3 import S3DagBundle
+
+
+@pytest.fixture
+def mocked_s3_resource():
+    with mock_aws():
+        yield boto3.resource("s3")
+
+
+@pytest.fixture
+def s3_client():
+    with mock_aws():
+        yield boto3.client("s3")
+
+
+@pytest.fixture
+def s3_bucket(mocked_s3_resource, s3_client):
+    bucket = mocked_s3_resource.create_bucket(Bucket=S3_BUCKET_NAME)
+
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_01.py", Body=b"test data")
+    s3_client.put_object(Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/dag_02.py", Body=b"test data")
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_a.py", Body=b"test data"
+    )
+    s3_client.put_object(
+        Bucket=bucket.name, Key=S3_BUCKET_PREFIX + "/subproject1/dag_b.py", Body=b"test data"
+    )
+
+    return bucket
+
+
+@pytest.fixture(autouse=True)
+def bundle_temp_dir(tmp_path):
+    with conf_vars({("dag_processor", "dag_bundle_storage_path"): str(tmp_path)}):
+        yield tmp_path
+
+
+@pytest.mark.skipif(not airflow.version.version.strip().startswith("3"), reason="Airflow >=3.0.0 test")
+class TestS3DagBundle:
+    @classmethod
+    def teardown_class(cls) -> None:
+        clear_db_connections()
+
+    @classmethod
+    def setup_class(cls) -> None:
+        db.merge_conn(
+            Connection(
+                conn_id=AWS_CONN_ID_DEFAULT,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                },
+            )
+        )
+        db.merge_conn(
+            conn=Connection(
+                conn_id=AWS_CONN_ID_WITH_REGION,
+                conn_type="aws",
+                extra={
+                    "config_kwargs": {"s3": {"bucket_name": S3_BUCKET_NAME}},
+                    "region_name": AWS_CONN_ID_REGION,
+                },
+            )
+        )
+
+    @pytest.mark.db_test
+    def test_view_url_generates_presigned_url(self):
+        bundle = S3DagBundle(
+            name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
+        )
+        url: str = bundle.view_url("test_version")
+        assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
+        assert "AWSAccessKeyId=" in url
+        assert "Signature=" in url
+        assert "Expires=" in url

Review Comment:
   Changed to a normal HTTP URL.
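For readers following the thread: a plain (non-presigned) S3 object URL carries no signature query parameters, so after this change the test would assert their absence rather than their presence. Below is a minimal sketch of how the reworked assertion might look, reusing the constants and bundle setup from the diff above; the test name and the exact host format (`https://<bucket>.s3.amazonaws.com/...`) are assumptions here, not taken from the final PR:

```python
# Hypothetical sketch (not the PR's final code): view_url() now returns a
# plain HTTP(S) URL, so the presigned-URL query parameters should be absent.
@pytest.mark.db_test
def test_view_url_returns_plain_http_url(self):
    bundle = S3DagBundle(
        name="test", aws_conn_id=AWS_CONN_ID_DEFAULT, prefix="project1/dags", bucket_name=S3_BUCKET_NAME
    )
    url: str = bundle.view_url("test_version")
    # The URL still points at the bucket and prefix...
    assert url.startswith("https://my-airflow-dags-bucket.s3.amazonaws.com/project1/dags")
    # ...but without any of the presigned-URL query parameters.
    assert "AWSAccessKeyId=" not in url
    assert "Signature=" not in url
    assert "Expires=" not in url
```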
