Repository: incubator-airflow
Updated Branches:
  refs/heads/master 7cba83333 -> 0f9f4605f


[AIRFLOW-2186] Change the way logging is carried out in a few ops

- Changed the way logging is implemented in
`PostgresToGoogleCloudStorageOperator` and
`HiveToDynamoDBTransferOperator`: replaced
module-level `logging.info` calls with
`self.log.info`.

Closes #3106 from kaxil/AIRFLOW-2186
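
For context, `self.log` is the per-class logger that Airflow's `LoggingMixin`
exposes on `BaseOperator`, so operator log records are routed through the task
instance's log handlers rather than the root logger. The sketch below is purely
illustrative and not part of this commit (the operator name and `sql` field are
made up; it assumes an Airflow 1.x-era API):

    from airflow.models import BaseOperator
    from airflow.utils.decorators import apply_defaults


    class MySampleOperator(BaseOperator):
        """Hypothetical operator, used only to illustrate the logging pattern."""

        @apply_defaults
        def __init__(self, sql, *args, **kwargs):
            super(MySampleOperator, self).__init__(*args, **kwargs)
            self.sql = sql

        def execute(self, context):
            # Preferred: goes to the per-task log, tagged with the class name.
            self.log.info('Running query: %s', self.sql)
            # A bare logging.info(...) here would hit the root logger and
            # bypass Airflow's per-task log handling.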


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/0f9f4605
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/0f9f4605
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/0f9f4605

Branch: refs/heads/master
Commit: 0f9f4605f6dffb1722447156b7dab6d875e4eac2
Parents: 7cba833
Author: Kaxil Naik <kaxiln...@gmail.com>
Authored: Wed Mar 7 10:28:03 2018 +0100
Committer: Fokko Driesprong <fokkodriespr...@godatadriven.com>
Committed: Wed Mar 7 10:28:03 2018 +0100

----------------------------------------------------------------------
 airflow/contrib/operators/hive_to_dynamodb.py      | 17 ++++++++++-------
 .../contrib/operators/postgres_to_gcs_operator.py  |  3 +--
 2 files changed, 11 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/0f9f4605/airflow/contrib/operators/hive_to_dynamodb.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py
index 55eca45..5c7bb8e 100644
--- a/airflow/contrib/operators/hive_to_dynamodb.py
+++ b/airflow/contrib/operators/hive_to_dynamodb.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import json
-import logging
 
 from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
 from airflow.hooks.hive_hooks import HiveServer2Hook
@@ -82,20 +81,24 @@ class HiveToDynamoDBTransferOperator(BaseOperator):
     def execute(self, context):
         hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id)
 
-        logging.info('Extracting data from Hive')
-        logging.info(self.sql)
+        self.log.info('Extracting data from Hive')
+        self.log.info(self.sql)
 
         data = hive.get_pandas_df(self.sql, schema=self.schema)
         dynamodb = AwsDynamoDBHook(aws_conn_id=self.aws_conn_id,
-                                   table_name=self.table_name, table_keys=self.table_keys, region_name=self.region_name)
+                                   table_name=self.table_name,
+                                   table_keys=self.table_keys,
+                                   region_name=self.region_name)
 
-        logging.info('Inserting rows into dynamodb')
+        self.log.info('Inserting rows into dynamodb')
 
         if self.pre_process is None:
             dynamodb.write_batch_data(
                 json.loads(data.to_json(orient='records')))
         else:
             dynamodb.write_batch_data(
-                self.pre_process(data=data, args=self.pre_process_args, kwargs=self.pre_process_kwargs))
+                self.pre_process(data=data,
+                                 args=self.pre_process_args,
+                                 kwargs=self.pre_process_kwargs))
 
-        logging.info('Done.')
+        self.log.info('Done.')

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/0f9f4605/airflow/contrib/operators/postgres_to_gcs_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/postgres_to_gcs_operator.py b/airflow/contrib/operators/postgres_to_gcs_operator.py
index 441ccf5..ab6fdf4 100644
--- a/airflow/contrib/operators/postgres_to_gcs_operator.py
+++ b/airflow/contrib/operators/postgres_to_gcs_operator.py
@@ -14,7 +14,6 @@
 
 import sys
 import json
-import logging
 import time
 import datetime
 
@@ -176,7 +175,7 @@ class PostgresToGoogleCloudStorageOperator(BaseOperator):
                 'mode': field_mode,
             })
 
-        logging.info('Using schema for %s: %s', self.schema_filename, schema)
+        self.log.info('Using schema for %s: %s', self.schema_filename, schema)
         tmp_schema_file_handle = NamedTemporaryFile(delete=True)
         s = json.dumps(schema, sort_keys=True)
         if PY3:
