SameerMesiah97 commented on code in PR #62558:
URL: https://github.com/apache/airflow/pull/62558#discussion_r2937406337


##########
providers/apache/hive/tests/unit/apache/hive/sensors/test_metastore_partition_refactor.py:
##########
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from unittest import mock
+
+from airflow.providers.apache.hive.sensors.metastore_partition import 
MetastorePartitionSensor
+
+
+class TestMetastorePartitionSensor:
+    def test_init(self):

Review Comment:
   I don't think this test is needed as you are just testing that the 
constructor works. I would remove it.



##########
providers/apache/hive/tests/unit/apache/hive/sensors/test_metastore_partition_refactor.py:
##########
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from unittest import mock
+
+from airflow.providers.apache.hive.sensors.metastore_partition import 
MetastorePartitionSensor
+
+
+class TestMetastorePartitionSensor:
+    def test_init(self):
+        op = MetastorePartitionSensor(
+            task_id="test_task",
+            table="my_table",
+            partition_name="ds=2023-01-01",
+            schema="my_schema",
+            mysql_conn_id="my_conn",
+        )
+        assert op.table == "my_table"
+        assert op.partition_name == "ds=2023-01-01"
+        assert op.schema == "my_schema"
+        assert op.conn_id == "my_conn"
+        assert op.sql == ""  # Initial dummy value
+
+    @mock.patch("airflow.providers.common.sql.sensors.sql.SqlSensor._get_hook")
+    def test_poke_sql_construction(self, mock_get_hook):
+        op = MetastorePartitionSensor(
+            task_id="test_task", table="my_table", 
partition_name="ds=2023-01-01", schema="my_schema"
+        )
+
+        # Mock hook behavior
+        hook = mock.MagicMock()
+        mock_get_hook.return_value = hook
+        hook.get_records.return_value = [[1]]  # Return non-empty result
+
+        context = {"ds": "2023-01-01"}  # Dummy context
+        result = op.poke(context)

Review Comment:
   Is context needed here? Reading the execution path, it does not look like it 
is ever used when `poke` is invoked. You are not setting it in the test below 
either. I would do `op.poke({})` here as well.



##########
providers/apache/hive/tests/unit/apache/hive/sensors/test_metastore_partition_refactor.py:
##########
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from unittest import mock
+
+from airflow.providers.apache.hive.sensors.metastore_partition import 
MetastorePartitionSensor
+
+

Review Comment:
   There is no need to create a new test module and class for this PR when you 
have `test_metastore_partition.py`. Add your revised tests there. 



##########
providers/apache/hive/tests/unit/apache/hive/sensors/test_metastore_partition_refactor.py:
##########
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from unittest import mock
+
+from airflow.providers.apache.hive.sensors.metastore_partition import 
MetastorePartitionSensor
+
+
+class TestMetastorePartitionSensor:
+    def test_init(self):
+        op = MetastorePartitionSensor(
+            task_id="test_task",
+            table="my_table",
+            partition_name="ds=2023-01-01",
+            schema="my_schema",
+            mysql_conn_id="my_conn",
+        )
+        assert op.table == "my_table"
+        assert op.partition_name == "ds=2023-01-01"
+        assert op.schema == "my_schema"
+        assert op.conn_id == "my_conn"
+        assert op.sql == ""  # Initial dummy value
+
+    @mock.patch("airflow.providers.common.sql.sensors.sql.SqlSensor._get_hook")
+    def test_poke_sql_construction(self, mock_get_hook):
+        op = MetastorePartitionSensor(
+            task_id="test_task", table="my_table", 
partition_name="ds=2023-01-01", schema="my_schema"
+        )
+
+        # Mock hook behavior
+        hook = mock.MagicMock()
+        mock_get_hook.return_value = hook
+        hook.get_records.return_value = [[1]]  # Return non-empty result
+
+        context = {"ds": "2023-01-01"}  # Dummy context
+        result = op.poke(context)
+
+        assert result is True
+        # Verify SQL construction
+        expected_sql = """
+            SELECT 'X'
+            FROM PARTITIONS A0
+            LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
+            LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
+            WHERE
+                B0.TBL_NAME = 'my_table' AND
+                C0.NAME = 'my_schema' AND
+                A0.PART_NAME = 'ds=2023-01-01';
+            """
+        # Normalize whitespace for comparison
+        assert " ".join(op.sql.split()) == " ".join(expected_sql.split())

Review Comment:
   Instead of trying to normalize whitespace, why not check key parts of the 
query like this:
   
   ```
   assert "PARTITIONS" in op.sql
   assert "B0.TBL_NAME = 'my_table'" in op.sql
   assert "A0.PART_NAME = 'ds=2023-01-01'" in op.sql
   ```
   
   What you have is far too brittle and not explicit enough.



##########
providers/apache/hive/tests/unit/apache/hive/sensors/test_metastore_partition_refactor.py:
##########
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from unittest import mock
+
+from airflow.providers.apache.hive.sensors.metastore_partition import 
MetastorePartitionSensor
+
+
+class TestMetastorePartitionSensor:
+    def test_init(self):
+        op = MetastorePartitionSensor(
+            task_id="test_task",
+            table="my_table",
+            partition_name="ds=2023-01-01",
+            schema="my_schema",
+            mysql_conn_id="my_conn",
+        )
+        assert op.table == "my_table"
+        assert op.partition_name == "ds=2023-01-01"
+        assert op.schema == "my_schema"
+        assert op.conn_id == "my_conn"
+        assert op.sql == ""  # Initial dummy value
+
+    @mock.patch("airflow.providers.common.sql.sensors.sql.SqlSensor._get_hook")
+    def test_poke_sql_construction(self, mock_get_hook):
+        op = MetastorePartitionSensor(
+            task_id="test_task", table="my_table", 
partition_name="ds=2023-01-01", schema="my_schema"
+        )
+
+        # Mock hook behavior
+        hook = mock.MagicMock()
+        mock_get_hook.return_value = hook
+        hook.get_records.return_value = [[1]]  # Return non-empty result
+
+        context = {"ds": "2023-01-01"}  # Dummy context
+        result = op.poke(context)
+
+        assert result is True
+        # Verify SQL construction
+        expected_sql = """
+            SELECT 'X'
+            FROM PARTITIONS A0
+            LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
+            LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
+            WHERE
+                B0.TBL_NAME = 'my_table' AND
+                C0.NAME = 'my_schema' AND
+                A0.PART_NAME = 'ds=2023-01-01';
+            """
+        # Normalize whitespace for comparison
+        assert " ".join(op.sql.split()) == " ".join(expected_sql.split())
+
+    @mock.patch("airflow.providers.common.sql.sensors.sql.SqlSensor._get_hook")
+    def test_poke_table_split(self, mock_get_hook):
+        op = MetastorePartitionSensor(
+            task_id="test_task", table="other_schema.other_table", 
partition_name="ds=2023-01-01"
+        )
+
+        hook = mock.MagicMock()
+        mock_get_hook.return_value = hook
+        hook.get_records.return_value = []  # Return empty result
+
+        result = op.poke({})
+
+        assert result is False
+        assert op.schema == "other_schema"
+        assert op.table == "other_table"
+
+        expected_sql = """
+            SELECT 'X'
+            FROM PARTITIONS A0
+            LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
+            LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
+            WHERE
+                B0.TBL_NAME = 'other_table' AND
+                C0.NAME = 'other_schema' AND
+                A0.PART_NAME = 'ds=2023-01-01';
+            """
+        assert " ".join(op.sql.split()) == " ".join(expected_sql.split())

Review Comment:
   Above comment applies here too.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to