This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-stable
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/v1-10-stable by this push:
     new df0ee79  Created rule for SparkJDBCOperator class conn_id (#13798)
df0ee79 is described below

commit df0ee79d9fd489e97bf3caf1f0e0259646bfb9ec
Author: Dr. Dennis Akpenyi <[email protected]>
AuthorDate: Tue Feb 2 12:26:43 2021 +0100

    Created rule for SparkJDBCOperator class conn_id (#13798)
    
    This PR implements an upgrade check for SparkJDBCOperator.
    
    In Airflow 1.10.x, the default name for SparkJDBCOperator class conn_id
    is 'spark-default'. From Airflow 2.0, it has been changed to
    'spark_default' to conform with the naming conventions of all
    other connection names.
    
    closes: #12918
---
 .../rules/spark_jdbc_operator_conn_id_rule.py      | 44 +++++++++++++++
 ...park_jdbc_operator_conn_id_default_name_rule.py | 62 ++++++++++++++++++++++
 2 files changed, 106 insertions(+)

diff --git a/airflow/upgrade/rules/spark_jdbc_operator_conn_id_rule.py 
b/airflow/upgrade/rules/spark_jdbc_operator_conn_id_rule.py
new file mode 100644
index 0000000..f439408
--- /dev/null
+++ b/airflow/upgrade/rules/spark_jdbc_operator_conn_id_rule.py
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.models import Connection
+from airflow.upgrade.rules.base_rule import BaseRule
+from airflow.utils.db import provide_session
+
+
class SparkJDBCOperatorConnIdRule(BaseRule):
    """Upgrade check: warn when a connection still uses the Airflow 1.10
    default SparkJDBCOperator connection id ``spark-default``.

    Airflow 2.0 renamed the default ``conn_id`` to ``spark_default`` to match
    the naming convention of all other connections, so a connection stored
    under the old id will no longer be picked up by operators relying on the
    default.
    """

    title = "Check Spark JDBC Operator default connection name"

    description = """\
In Airflow 1.10.x, the default value for SparkJDBCOperator class 'conn_id' is 'spark-default'.
From Airflow 2.0, it has been changed to 'spark_default' to conform with the naming conventions
of all other connection names.
    """

    @provide_session
    def check(self, session=None):
        """Return a deprecation warning if a 'spark-default' connection exists.

        :param session: SQLAlchemy ORM session, injected by ``@provide_session``.
        :return: warning message string, or ``None`` when no legacy id is found.
        """
        # Push the existence test into the database instead of streaming every
        # connection row through Python — only one id value matters here.
        legacy = (
            session.query(Connection.conn_id)
            .filter(Connection.conn_id == 'spark-default')
            .first()
        )
        if legacy is not None:
            return (
                "Deprecation Warning: From Airflow 2.0, the default value of 'conn_id' argument of "
                "SparkJDBCOperator class has been changed to 'spark_default' to conform with the naming "
                "conventions of all other connection names. Please rename the connection with "
                "id 'spark-default' to 'spark_default' or explicitly pass 'spark-default' "
                "to the operator. See the link below for details: "
                "https://github.com/apache/airflow/blob/2.0.0/"
                "UPDATING.md#sparkjdbchook-default-connection"
            )
diff --git 
a/tests/upgrade/rules/test_spark_jdbc_operator_conn_id_default_name_rule.py 
b/tests/upgrade/rules/test_spark_jdbc_operator_conn_id_default_name_rule.py
new file mode 100644
index 0000000..ac117e0
--- /dev/null
+++ b/tests/upgrade/rules/test_spark_jdbc_operator_conn_id_default_name_rule.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest import TestCase
+
+from airflow.models import Connection
+from airflow.upgrade.rules.spark_jdbc_operator_conn_id_rule import 
SparkJDBCOperatorConnIdRule
+from airflow.utils.db import create_session
+from tests.test_utils.db import clear_db_connections
+
+
class TestSparkJDBCOperatorDefaultConnIdRule(TestCase):
    """Exercise SparkJDBCOperatorConnIdRule.check() against both the new-style
    and the deprecated legacy connection id."""

    def tearDown(self):
        # Remove any connections a test persisted so cases stay independent.
        clear_db_connections()

    @staticmethod
    def _store_connection(conn_id):
        """Persist a Connection with the given id; return the session used."""
        with create_session() as session:
            session.merge(Connection(conn_id=conn_id))
        return session

    def test_check(self):
        rule = SparkJDBCOperatorConnIdRule()

        assert isinstance(rule.title, str)
        assert isinstance(rule.description, str)

        session = self._store_connection('spark_default')

        # The renamed (Airflow 2.0) id must not produce a warning.
        assert rule.check(session=session) is None

    def test_invalid_check(self):
        rule = SparkJDBCOperatorConnIdRule()

        assert isinstance(rule.title, str)
        assert isinstance(rule.description, str)

        session = self._store_connection('spark-default')

        expected = (
            "Deprecation Warning: From Airflow 2.0, the default value of 'conn_id' argument of "
            "SparkJDBCOperator class has been changed to 'spark_default' to conform with the naming "
            "conventions of all other connection names. Please rename the connection with "
            "id 'spark-default' to 'spark_default' or explicitly pass 'spark-default' "
            "to the operator. See the link below for details: "
            "https://github.com/apache/airflow/blob/2.0.0/UPDATING.md#sparkjdbchook-default-connection"
        )

        assert expected == rule.check(session=session)

Reply via email to