dsynkov commented on a change in pull request #12389:
URL: https://github.com/apache/airflow/pull/12389#discussion_r526430786
##########
File path: tests/providers/amazon/aws/sensors/test_s3_key.py
##########
@@ -32,32 +32,40 @@ def test_bucket_name_none_and_bucket_key_as_relative_path(self):
and bucket_key is provided as relative path rather than s3:// url.
:return:
"""
+ op = S3KeySensor(task_id='s3_key_sensor', bucket_key="file_in_bucket")
with self.assertRaises(AirflowException):
- S3KeySensor(task_id='s3_key_sensor', bucket_key="file_in_bucket")
+ op.poke(None)
def test_bucket_name_provided_and_bucket_key_is_s3_url(self):
"""
Test if exception is raised when bucket_name is provided
while bucket_key is provided as a full s3:// url.
:return:
"""
+        op = S3KeySensor(
+            task_id='s3_key_sensor', bucket_key="s3://test_bucket/file", bucket_name='test_bucket'
+        )
with self.assertRaises(AirflowException):
-            S3KeySensor(
-                task_id='s3_key_sensor', bucket_key="s3://test_bucket/file", bucket_name='test_bucket'
-            )
+ op.poke(None)
@parameterized.expand(
[
['s3://bucket/key', None, 'key', 'bucket'],
['key', 'bucket', 'key', 'bucket'],
]
)
- def test_parse_bucket_key(self, key, bucket, parsed_key, parsed_bucket):
+ @mock.patch('airflow.providers.amazon.aws.sensors.s3_key.S3Hook')
Review comment:
Thanks; I added it as a separate test because, as far as I understand,
rendering requires creating a `TaskInstance`. Let me know if that's
what you had in mind.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]