This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/v1-10-test by this push:
     new 1cbcd6c  [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571)
1cbcd6c is described below

commit 1cbcd6c967f51051b0c71dd3ca0ecde2c5095b2c
Author: Omair Khan <[email protected]>
AuthorDate: Mon Apr 27 11:50:49 2020 +0530

    [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571)
    
    Fixes a bug introduced in #7680 with passing filename as string
    
    (cherry picked from commit 74bc316c56192f14677e9406d3878887a836062b)
---
 airflow/hooks/S3_hook.py    |  5 +++--
 tests/hooks/test_s3_hook.py | 12 ++++++++----
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/airflow/hooks/S3_hook.py b/airflow/hooks/S3_hook.py
index 1ba69c6..cce8e5b 100644
--- a/airflow/hooks/S3_hook.py
+++ b/airflow/hooks/S3_hook.py
@@ -361,8 +361,9 @@ class S3Hook(AwsHook):
         if encrypt:
             extra_args['ServerSideEncryption'] = "AES256"
         if gzip:
-            filename_gz = filename.name + '.gz'
-            with open(filename.name, 'rb') as f_in:
+            filename_gz = ''
+            with open(filename, 'rb') as f_in:
+                filename_gz = f_in.name + '.gz'
                 with gz.open(filename_gz, 'wb') as f_out:
                     shutil.copyfileobj(f_in, f_out)
                     filename = filename_gz
diff --git a/tests/hooks/test_s3_hook.py b/tests/hooks/test_s3_hook.py
index 3e6a831..e96afca 100644
--- a/tests/hooks/test_s3_hook.py
+++ b/tests/hooks/test_s3_hook.py
@@ -20,6 +20,8 @@
 
 import gzip as gz
 import io
+import os
+
 import mock
 import tempfile
 import unittest
@@ -320,13 +322,14 @@ class TestS3Hook(unittest.TestCase):
         # We need to create the bucket since this is all in Moto's 'virtual'
         # AWS account
         conn.create_bucket(Bucket="mybucket")
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
             temp_file.write(b"Content")
             temp_file.seek(0)
-            hook.load_file(temp_file, "my_key", 'mybucket', gzip=True)
+            hook.load_file(temp_file.name, "my_key", 'mybucket', gzip=True)
             resource = boto3.resource('s3').Object('mybucket', 'my_key')  # pylint: disable=no-member
             with gz.GzipFile(fileobj=io.BytesIO(resource.get()['Body'].read())) as gzfile:
                 assert gzfile.read() == b'Content'
+            os.unlink(temp_file.name)
 
     @mock_s3
     def test_load_file_acl(self):
@@ -335,16 +338,17 @@ class TestS3Hook(unittest.TestCase):
         # We need to create the bucket since this is all in Moto's 'virtual'
         # AWS account
         conn.create_bucket(Bucket="mybucket")
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
             temp_file.write(b"Content")
             temp_file.seek(0)
-            hook.load_file(temp_file, "my_key", 'mybucket', gzip=True,
+            hook.load_file(temp_file.name, "my_key", 'mybucket', gzip=True,
                            acl_policy='public-read')
             response = boto3.client('s3').get_object_acl(Bucket='mybucket',
                                                          Key="my_key",
                                                          RequestPayer='requester')  # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301
             assert ((response['Grants'][1]['Permission'] == 'READ') and
                     (response['Grants'][0]['Permission'] == 'FULL_CONTROL'))
+            os.unlink(temp_file.name)
 
     @mock_s3
     def test_copy_object_acl(self):

Reply via email to