[ https://issues.apache.org/jira/browse/AIRFLOW-2629?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16722523#comment-16722523 ]

ASF GitHub Bot commented on AIRFLOW-2629:
-----------------------------------------

eladkal closed pull request #4317: [AIRFLOW-2629] Change reference of hive_hooks to hive_hook everywhere
URL: https://github.com/apache/incubator-airflow/pull/4317

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below
for the sake of provenance:

diff --git a/UPDATING.md b/UPDATING.md
index 88dc78c810..8c42c77a88 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -24,6 +24,10 @@ assists users migrating to a new version.
 
 ## Airflow Master
 
+### Rename of hive_hooks.py to hive_hook.py
+
+For consistency with all other hooks, the hive_hooks.py file has been renamed to hive_hook.py.
+
 ### New `dag_processor_manager_log_location` config option
 
 The DAG parsing manager log now by default will be logged into a file, where its location is
diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py
index 4a39e40741..fa5be638dc 100644
--- a/airflow/contrib/operators/hive_to_dynamodb.py
+++ b/airflow/contrib/operators/hive_to_dynamodb.py
@@ -20,7 +20,7 @@
 import json
 
 from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
-from airflow.hooks.hive_hooks import HiveServer2Hook
+from airflow.hooks.hive_hook import HiveServer2Hook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/contrib/operators/vertica_to_hive.py b/airflow/contrib/operators/vertica_to_hive.py
index 00fdb649c4..dd9fe76ba3 100644
--- a/airflow/contrib/operators/vertica_to_hive.py
+++ b/airflow/contrib/operators/vertica_to_hive.py
@@ -22,7 +22,7 @@
 import unicodecsv as csv
 from tempfile import NamedTemporaryFile
 
-from airflow.hooks.hive_hooks import HiveCliHook
+from airflow.hooks.hive_hook import HiveCliHook
 from airflow.contrib.hooks.vertica_hook import VerticaHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/contrib/plugins/metastore_browser/main.py b/airflow/contrib/plugins/metastore_browser/main.py
index 836e53191a..580262a04d 100644
--- a/airflow/contrib/plugins/metastore_browser/main.py
+++ b/airflow/contrib/plugins/metastore_browser/main.py
@@ -24,7 +24,7 @@
 from flask_admin import BaseView, expose
 import pandas as pd
 
-from airflow.hooks.hive_hooks import HiveMetastoreHook, HiveCliHook
+from airflow.hooks.hive_hook import HiveMetastoreHook, HiveCliHook
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.hooks.presto_hook import PrestoHook
 from airflow.plugins_manager import AirflowPlugin
diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hook.py
similarity index 100%
rename from airflow/hooks/hive_hooks.py
rename to airflow/hooks/hive_hook.py
diff --git a/airflow/macros/hive.py b/airflow/macros/hive.py
index f8e57bb515..2b96323bac 100644
--- a/airflow/macros/hive.py
+++ b/airflow/macros/hive.py
@@ -47,7 +47,7 @@ def max_partition(
     >>> max_partition('airflow.static_babynames_partitioned')
     '2015-01-01'
     """
-    from airflow.hooks.hive_hooks import HiveMetastoreHook
+    from airflow.hooks.hive_hook import HiveMetastoreHook
     if '.' in table:
         schema, table = table.split('.')
     hh = HiveMetastoreHook(metastore_conn_id=metastore_conn_id)
@@ -100,7 +100,7 @@ def closest_ds_partition(
     >>> closest_ds_partition(tbl, '2015-01-02')
     '2015-01-01'
     """
-    from airflow.hooks.hive_hooks import HiveMetastoreHook
+    from airflow.hooks.hive_hook import HiveMetastoreHook
     if '.' in table:
         schema, table = table.split('.')
     hh = HiveMetastoreHook(metastore_conn_id=metastore_conn_id)
diff --git a/airflow/models.py b/airflow/models.py
index 74555902a5..9f46563d6a 100755
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -768,13 +768,13 @@ def get_hook(self):
                 from airflow.hooks.postgres_hook import PostgresHook
                 return PostgresHook(postgres_conn_id=self.conn_id)
             elif self.conn_type == 'hive_cli':
-                from airflow.hooks.hive_hooks import HiveCliHook
+                from airflow.hooks.hive_hook import HiveCliHook
                 return HiveCliHook(hive_cli_conn_id=self.conn_id)
             elif self.conn_type == 'presto':
                 from airflow.hooks.presto_hook import PrestoHook
                 return PrestoHook(presto_conn_id=self.conn_id)
             elif self.conn_type == 'hiveserver2':
-                from airflow.hooks.hive_hooks import HiveServer2Hook
+                from airflow.hooks.hive_hook import HiveServer2Hook
                 return HiveServer2Hook(hiveserver2_conn_id=self.conn_id)
             elif self.conn_type == 'sqlite':
                 from airflow.hooks.sqlite_hook import SqliteHook
diff --git a/airflow/operators/hive_operator.py b/airflow/operators/hive_operator.py
index cf3f655e59..de96948348 100644
--- a/airflow/operators/hive_operator.py
+++ b/airflow/operators/hive_operator.py
@@ -21,7 +21,7 @@
 
 import re
 
-from airflow.hooks.hive_hooks import HiveCliHook
+from airflow.hooks.hive_hook import HiveCliHook
 from airflow import configuration
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/hive_stats_operator.py b/airflow/operators/hive_stats_operator.py
index b0bb874956..d506a6a074 100644
--- a/airflow/operators/hive_stats_operator.py
+++ b/airflow/operators/hive_stats_operator.py
@@ -24,7 +24,7 @@
 from airflow.exceptions import AirflowException
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.hooks.presto_hook import PrestoHook
-from airflow.hooks.hive_hooks import HiveMetastoreHook
+from airflow.hooks.hive_hook import HiveMetastoreHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index 8d5ed0361a..1c054b4794 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -17,7 +17,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-from airflow.hooks.hive_hooks import HiveCliHook, HiveMetastoreHook
+from airflow.hooks.hive_hook import HiveCliHook, HiveMetastoreHook
 from airflow.hooks.druid_hook import DruidHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py
index 882a9d8540..e6bf585a88 100644
--- a/airflow/operators/hive_to_mysql.py
+++ b/airflow/operators/hive_to_mysql.py
@@ -19,7 +19,7 @@
 
 from tempfile import NamedTemporaryFile
 
-from airflow.hooks.hive_hooks import HiveServer2Hook
+from airflow.hooks.hive_hook import HiveServer2Hook
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py
index 7963524a10..378542ee0e 100644
--- a/airflow/operators/hive_to_samba_operator.py
+++ b/airflow/operators/hive_to_samba_operator.py
@@ -19,7 +19,7 @@
 
 import tempfile
 
-from airflow.hooks.hive_hooks import HiveServer2Hook
+from airflow.hooks.hive_hook import HiveServer2Hook
 from airflow.hooks.samba_hook import SambaHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/mssql_to_hive.py b/airflow/operators/mssql_to_hive.py
index 4dff9ebbb9..580db8af16 100644
--- a/airflow/operators/mssql_to_hive.py
+++ b/airflow/operators/mssql_to_hive.py
@@ -23,7 +23,7 @@
 from tempfile import NamedTemporaryFile
 import pymssql
 
-from airflow.hooks.hive_hooks import HiveCliHook
+from airflow.hooks.hive_hook import HiveCliHook
 from airflow.hooks.mssql_hook import MsSqlHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/mysql_to_hive.py b/airflow/operators/mysql_to_hive.py
index 94d660801f..1a4a321adf 100644
--- a/airflow/operators/mysql_to_hive.py
+++ b/airflow/operators/mysql_to_hive.py
@@ -23,7 +23,7 @@
 from tempfile import NamedTemporaryFile
 import MySQLdb
 
-from airflow.hooks.hive_hooks import HiveCliHook
+from airflow.hooks.hive_hook import HiveCliHook
 from airflow.hooks.mysql_hook import MySqlHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/s3_to_hive_operator.py b/airflow/operators/s3_to_hive_operator.py
index 228470fad7..2aa3a69499 100644
--- a/airflow/operators/s3_to_hive_operator.py
+++ b/airflow/operators/s3_to_hive_operator.py
@@ -28,7 +28,7 @@
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.S3_hook import S3Hook
-from airflow.hooks.hive_hooks import HiveCliHook
+from airflow.hooks.hive_hook import HiveCliHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
 from airflow.utils.compression import uncompress_file
diff --git a/airflow/sensors/hive_partition_sensor.py b/airflow/sensors/hive_partition_sensor.py
index ca10c863b4..7192745648 100644
--- a/airflow/sensors/hive_partition_sensor.py
+++ b/airflow/sensors/hive_partition_sensor.py
@@ -68,7 +68,7 @@ def poke(self, context):
             'Poking for table {self.schema}.{self.table}, '
             'partition {self.partition}'.format(**locals()))
         if not hasattr(self, 'hook'):
-            from airflow.hooks.hive_hooks import HiveMetastoreHook
+            from airflow.hooks.hive_hook import HiveMetastoreHook
             self.hook = HiveMetastoreHook(
                 metastore_conn_id=self.metastore_conn_id)
         return self.hook.check_for_partition(
diff --git a/airflow/sensors/named_hive_partition_sensor.py b/airflow/sensors/named_hive_partition_sensor.py
index 4a076a3dd6..481b69ce9c 100644
--- a/airflow/sensors/named_hive_partition_sensor.py
+++ b/airflow/sensors/named_hive_partition_sensor.py
@@ -83,7 +83,7 @@ def parse_partition_name(partition):
 
     def poke_partition(self, partition):
         if not self.hook:
-            from airflow.hooks.hive_hooks import HiveMetastoreHook
+            from airflow.hooks.hive_hook import HiveMetastoreHook
             self.hook = HiveMetastoreHook(
                 metastore_conn_id=self.metastore_conn_id)
 
diff --git a/docs/code.rst b/docs/code.rst
index 996f702a0e..946b73f2ac 100644
--- a/docs/code.rst
+++ b/docs/code.rst
@@ -387,7 +387,7 @@ interface when possible and acting as building blocks for operators.
 
 .. autoclass:: airflow.hooks.dbapi_hook.DbApiHook
 .. autoclass:: airflow.hooks.docker_hook.DockerHook
-.. automodule:: airflow.hooks.hive_hooks
+.. automodule:: airflow.hooks.hive_hook
     :members:
       HiveCliHook,
       HiveMetastoreHook,
diff --git a/tests/contrib/operators/test_hive_to_dynamodb_operator.py b/tests/contrib/operators/test_hive_to_dynamodb_operator.py
index ab86e05517..5135a7817a 100644
--- a/tests/contrib/operators/test_hive_to_dynamodb_operator.py
+++ b/tests/contrib/operators/test_hive_to_dynamodb_operator.py
@@ -63,7 +63,7 @@ def test_get_conn_returns_a_boto3_connection(self):
         hook = AwsDynamoDBHook(aws_conn_id='aws_default')
         self.assertIsNotNone(hook.get_conn())
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
+    @mock.patch('airflow.hooks.hive_hook.HiveServer2Hook.get_pandas_df',
                return_value=pd.DataFrame(data=[('1', 'sid')], columns=['id', 'name']))
    @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
     @mock_dynamodb2
@@ -103,7 +103,7 @@ def test_get_records_with_schema(self, get_results_mock):
             'table_exists').wait(TableName='test_airflow')
         self.assertEqual(table.item_count, 1)
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
+    @mock.patch('airflow.hooks.hive_hook.HiveServer2Hook.get_pandas_df',
                return_value=pd.DataFrame(data=[('1', 'sid'), ('1', 'gupta')], columns=['id', 'name']))
    @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
     @mock_dynamodb2
diff --git a/tests/hooks/test_hive_hook.py b/tests/hooks/test_hive_hook.py
index 1cac74c6cd..548a4e6cd7 100644
--- a/tests/hooks/test_hive_hook.py
+++ b/tests/hooks/test_hive_hook.py
@@ -31,7 +31,7 @@
 
 from airflow import DAG, configuration
 from airflow.exceptions import AirflowException
-from airflow.hooks.hive_hooks import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
+from airflow.hooks.hive_hook import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
 from airflow.operators.hive_operator import HiveOperator
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
@@ -128,7 +128,7 @@ def test_run_cli_with_hive_conf(self):
         del os.environ[execution_date_ctx_var_name]
         del os.environ[dag_run_id_ctx_var_name]
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.run_cli')
+    @mock.patch('airflow.hooks.hive_hook.HiveCliHook.run_cli')
     def test_load_file(self, mock_run_cli):
         filepath = "/path/to/input/file"
         table = "output_table"
@@ -143,7 +143,7 @@ def test_load_file(self, mock_run_cli):
         )
         mock_run_cli.assert_called_with(query)
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.load_file')
+    @mock.patch('airflow.hooks.hive_hook.HiveCliHook.load_file')
     @mock.patch('pandas.DataFrame.to_csv')
     def test_load_df(self, mock_to_csv, mock_load_file):
         df = pd.DataFrame({"c": ["foo", "bar", "baz"]})
@@ -170,7 +170,7 @@ def test_load_df(self, mock_to_csv, mock_load_file):
         self.assertTrue(isinstance(kwargs["field_dict"], OrderedDict))
         self.assertEqual(kwargs["table"], table)
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.load_file')
+    @mock.patch('airflow.hooks.hive_hook.HiveCliHook.load_file')
     @mock.patch('pandas.DataFrame.to_csv')
    def test_load_df_with_optional_parameters(self, mock_to_csv, mock_load_file):
         hook = HiveCliHook()
@@ -187,7 +187,7 @@ def test_load_df_with_optional_parameters(self, mock_to_csv, mock_load_file):
             self.assertEqual(kwargs["create"], create)
             self.assertEqual(kwargs["recreate"], recreate)
 
-    @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.run_cli')
+    @mock.patch('airflow.hooks.hive_hook.HiveCliHook.run_cli')
     def test_load_df_with_data_types(self, mock_run_cli):
         d = OrderedDict()
         d['b'] = [True]
diff --git a/tests/operators/test_hive_operator.py b/tests/operators/test_hive_operator.py
index 7d8614c324..4cc45a5f0e 100644
--- a/tests/operators/test_hive_operator.py
+++ b/tests/operators/test_hive_operator.py
@@ -136,7 +136,7 @@ def test_mapred_job_name(self, mock_get_hook):
 
 if 'AIRFLOW_RUNALL_TESTS' in os.environ:
 
-    import airflow.hooks.hive_hooks
+    import airflow.hooks.hive_hook
     import airflow.operators.presto_to_mysql
 
     class HivePrestoTest(HiveEnvironmentTest):
diff --git a/tests/operators/test_operators.py b/tests/operators/test_operators.py
index 39a813316e..3571fdc2f7 100644
--- a/tests/operators/test_operators.py
+++ b/tests/operators/test_operators.py
@@ -434,7 +434,7 @@ def test_mysql_to_hive_tblproperties(self):
 
    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
                         "This is a MySQL test")
-    @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.load_file')
+    @mock.patch('airflow.hooks.hive_hook.HiveCliHook.load_file')
     def test_mysql_to_hive_type_conversion(self, mock_load_file):
         mysql_table = 'test_mysql_to_hive'
 
@@ -533,7 +533,7 @@ def test_mysql_to_hive_verify_loaded_values(self):
                 dag=self.dag)
            t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
-            from airflow.hooks.hive_hooks import HiveServer2Hook
+            from airflow.hooks.hive_hook import HiveServer2Hook
             h = HiveServer2Hook()
             r = h.get_records("SELECT * FROM {}".format(hive_table))
             self.assertEqual(r[0], minmax)
diff --git a/tests/sensors/test_named_hive_partition_sensor.py b/tests/sensors/test_named_hive_partition_sensor.py
index 4fef3e0f34..c20676ef45 100644
--- a/tests/sensors/test_named_hive_partition_sensor.py
+++ b/tests/sensors/test_named_hive_partition_sensor.py
@@ -23,7 +23,7 @@
 from airflow import configuration, DAG, operators
 from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
 from airflow.utils.timezone import datetime
-from airflow.hooks.hive_hooks import HiveMetastoreHook
+from airflow.hooks.hive_hook import HiveMetastoreHook
 
 configuration.load_test_config()
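
For DAGs and plugins that import the Hive hooks directly, the rename amounts
to a one-line import change. A minimal sketch, assuming a build of Airflow
with this PR applied (the connection id and query below are illustrative):

    # New module path after the rename (was airflow.hooks.hive_hooks):
    from airflow.hooks.hive_hook import HiveCliHook

    # Usage is unchanged; only the import path differs.
    hook = HiveCliHook(hive_cli_conn_id='hive_cli_default')
    hook.run_cli('SHOW DATABASES;')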
 


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> Rename a.h.hive_hooks to a.h.hive_hook
> --------------------------------------
>
>                 Key: AIRFLOW-2629
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-2629
>             Project: Apache Airflow
>          Issue Type: Improvement
>          Components: hive_hooks, hooks
>            Reporter: Kengo Seki
>            Priority: Minor
>
> As with AIRFLOW-2211, {{airflow.hooks.hive_hooks}} should be renamed to 
> {{airflow.hooks.hive_hook}} for consistency with other packages.
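
For user code that must run on Airflow versions from both before and after
this rename, a guarded import is one option (a sketch, not part of this PR):

    try:
        from airflow.hooks.hive_hook import HiveMetastoreHook   # post-rename path
    except ImportError:
        from airflow.hooks.hive_hooks import HiveMetastoreHook  # pre-rename path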



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
