This is an automated email from the ASF dual-hosted git repository.
taragolis pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new c5ac2d25ce fix PT012 in hive (#38501)
c5ac2d25ce is described below
commit c5ac2d25ce495e872c2d57c2c422533576fe2f0b
Author: rom sharon <[email protected]>
AuthorDate: Tue Mar 26 17:53:49 2024 +0200
fix PT012 in hive (#38501)
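    [Note: PT012 is the flake8-pytest-style rule, enforced here via ruff, that requires a
    pytest.raises() block to contain only a single simple statement, so an exception raised
    by setup code cannot accidentally satisfy the check. The hunks below apply that pattern
    by moving setup out of the with block. A minimal sketch of the pattern follows, using
    hypothetical names that are not part of the Airflow test suite:

    import pytest


    class Widget:
        """Hypothetical stand-in for the hook/sensor classes touched in this commit."""

        def __init__(self, schema):
            self.schema = schema

        def run(self):
            if ";" in self.schema:
                raise RuntimeError(f"The schema `{self.schema}` contains invalid characters: ;")


    def test_widget_rejects_bad_schema():
        # Setup stays outside the pytest.raises block, satisfying PT012 ...
        widget = Widget("default;")
        # ... so the block holds only the single call expected to raise.
        with pytest.raises(RuntimeError) as error:
            widget.run()
        assert "invalid characters" in str(error.value)
    ]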
---
pyproject.toml | 2 --
tests/providers/apache/hive/hooks/test_hive.py | 2 +-
.../hive/sensors/test_named_hive_partition.py | 28 +++++++++++-----------
3 files changed, 15 insertions(+), 17 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 05a35a66cf..332a423160 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -376,8 +376,6 @@ combine-as-imports = true
"tests/providers/amazon/aws/transfers/test_redshift_to_s3.py" = ["PT012"]
"tests/providers/amazon/aws/triggers/test_ecs.py" = ["PT012"]
"tests/providers/amazon/aws/waiters/test_neptune.py" = ["PT012"]
-"tests/providers/apache/hive/hooks/test_hive.py" = ["PT012"]
-"tests/providers/apache/hive/sensors/test_named_hive_partition.py" = ["PT012"]
"tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py" =
["PT012"]
"tests/providers/cncf/kubernetes/hooks/test_kubernetes.py" = ["PT012"]
"tests/providers/cncf/kubernetes/operators/test_pod.py" = ["PT012"]
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 027ad8c688..b69f68b149 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -115,8 +115,8 @@ class TestHiveCliHook:
         )
 
     def test_hive_cli_hook_invalid_schema(self):
+        hook = InvalidHiveCliHook()
         with pytest.raises(RuntimeError) as error:
-            hook = InvalidHiveCliHook()
             hook.run_cli("SHOW DATABASES")
 
         assert str(error.value) == "The schema `default;` contains invalid characters: ;"
diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/tests/providers/apache/hive/sensors/test_named_hive_partition.py
index b75c25e6a2..4f867b45fe 100644
--- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py
+++ b/tests/providers/apache/hive/sensors/test_named_hive_partition.py
@@ -166,19 +166,19 @@ class TestPartitions(TestHiveEnvironment):
         assert name[2] == "part1=this.can.be.an.issue/part2=ok"
 
     def test_times_out_on_nonexistent_partition(self):
+        mock_hive_metastore_hook = MockHiveMetastoreHook()
+        mock_hive_metastore_hook.check_for_named_partition = mock.MagicMock(return_value=False)
+
+        op = NamedHivePartitionSensor(
+            task_id="hive_partition_check",
+            partition_names=[
+                "airflow.static_babynames_partitioned/ds={{ds}}",
+                "airflow.static_babynames_partitioned/ds=nonexistent",
+            ],
+            poke_interval=0.1,
+            timeout=1,
+            dag=self.dag,
+            hook=mock_hive_metastore_hook,
+        )
         with pytest.raises(AirflowSensorTimeout):
-            mock_hive_metastore_hook = MockHiveMetastoreHook()
-            mock_hive_metastore_hook.check_for_named_partition = mock.MagicMock(return_value=False)
-
-            op = NamedHivePartitionSensor(
-                task_id="hive_partition_check",
-                partition_names=[
-                    "airflow.static_babynames_partitioned/ds={{ds}}",
-                    "airflow.static_babynames_partitioned/ds=nonexistent",
-                ],
-                poke_interval=0.1,
-                timeout=1,
-                dag=self.dag,
-                hook=mock_hive_metastore_hook,
-            )
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)