This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 48eccb8f96 Add more checks for trigger module (#31278)
48eccb8f96 is described below
commit 48eccb8f967ac559a33dd639bf2c50e0288eb1a8
Author: Pankaj Singh <[email protected]>
AuthorDate: Sun May 21 13:45:31 2023 +0530
Add more checks for trigger module (#31278)
* Add more checks for trigger module
Let's enable more checks for provider trigger modules.
In this PR, enable the following checks
- Make sure the trigger class name ends with Trigger
- If a trigger class is listed in provider.yaml, make sure it exists in the module
- Add a test that provider_manager.trigger returns a non-zero number of classes
---------
Co-authored-by: Tzu-ping Chung <[email protected]>
---
scripts/in_container/run_provider_yaml_files_check.py | 17 +++++++++++++++++
scripts/in_container/verify_providers.py | 15 +++++++++++++++
tests/always/test_providers_manager.py | 5 +++++
3 files changed, 37 insertions(+)
diff --git a/scripts/in_container/run_provider_yaml_files_check.py
b/scripts/in_container/run_provider_yaml_files_check.py
index 40bf38e50f..1f1dd675ac 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -306,6 +306,22 @@ def check_hook_classes(yaml_files: dict[str, dict]):
)
+def check_trigger_classes(yaml_files: dict[str, dict]):
+ print("Checking triggers classes belong to package, exist and are classes")
+ resource_type = "triggers"
+ for yaml_file_path, provider_data in yaml_files.items():
+ provider_package =
pathlib.Path(yaml_file_path).parent.as_posix().replace("/", ".")
+ trigger_classes = {
+ name
+ for trigger_class in provider_data.get(resource_type, {})
+ for name in trigger_class["class-names"]
+ }
+ if trigger_classes:
+ check_if_objects_exist_and_belong_to_package(
+ trigger_classes, provider_package, yaml_file_path,
resource_type, ObjectType.CLASS
+ )
+
+
def check_plugin_classes(yaml_files: dict[str, dict]):
print("Checking plugin classes belong to package, exist and are classes")
resource_type = "plugins"
@@ -509,6 +525,7 @@ if __name__ == "__main__":
check_hook_classes(all_parsed_yaml_files)
check_plugin_classes(all_parsed_yaml_files)
check_extra_link_classes(all_parsed_yaml_files)
+ check_trigger_classes(all_parsed_yaml_files)
check_correctness_of_list_of_sensors_operators_hook_modules(all_parsed_yaml_files)
check_unique_provider_name(all_parsed_yaml_files)
check_providers_have_all_documentation_files(all_parsed_yaml_files)
diff --git a/scripts/in_container/verify_providers.py
b/scripts/in_container/verify_providers.py
index 4da365b916..741850f173 100755
--- a/scripts/in_container/verify_providers.py
+++ b/scripts/in_container/verify_providers.py
@@ -52,6 +52,7 @@ class EntityType(Enum):
Sensors = "Sensors"
Hooks = "Hooks"
Secrets = "Secrets"
+ Trigger = "Trigger"
class EntityTypeSummary(NamedTuple):
@@ -82,6 +83,7 @@ ENTITY_NAMES = {
EntityType.Sensors: "Sensors",
EntityType.Hooks: "Hooks",
EntityType.Secrets: "Secrets",
+ EntityType.Trigger: "Trigger",
}
TOTALS: dict[EntityType, int] = {
@@ -90,6 +92,7 @@ TOTALS: dict[EntityType, int] = {
EntityType.Sensors: 0,
EntityType.Transfers: 0,
EntityType.Secrets: 0,
+ EntityType.Trigger: 0,
}
OPERATORS_PATTERN = r".*Operator$"
@@ -98,6 +101,7 @@ HOOKS_PATTERN = r".*Hook$"
SECRETS_PATTERN = r".*Backend$"
TRANSFERS_PATTERN = r".*To[A-Z0-9].*Operator$"
WRONG_TRANSFERS_PATTERN = r".*Transfer$|.*TransferOperator$"
+TRIGGER_PATTERN = r".*Trigger$"
ALL_PATTERNS = {
OPERATORS_PATTERN,
@@ -106,6 +110,7 @@ ALL_PATTERNS = {
SECRETS_PATTERN,
TRANSFERS_PATTERN,
WRONG_TRANSFERS_PATTERN,
+ TRIGGER_PATTERN,
}
EXPECTED_SUFFIXES: dict[EntityType, str] = {
@@ -114,6 +119,7 @@ EXPECTED_SUFFIXES: dict[EntityType, str] = {
EntityType.Sensors: "Sensor",
EntityType.Secrets: "Backend",
EntityType.Transfers: "Operator",
+ EntityType.Trigger: "Trigger",
}
@@ -549,6 +555,7 @@ def get_package_class_summary(
from airflow.models.baseoperator import BaseOperator
from airflow.secrets import BaseSecretsBackend
from airflow.sensors.base import BaseSensorOperator
+ from airflow.triggers.base import BaseTrigger
all_verified_entities: dict[EntityType, VerifiedEntities] = {
EntityType.Operators: find_all_entities(
@@ -601,6 +608,14 @@ def get_package_class_summary(
expected_class_name_pattern=TRANSFERS_PATTERN,
unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN,
TRANSFERS_PATTERN},
),
+ EntityType.Trigger: find_all_entities(
+ imported_classes=imported_classes,
+ base_package=full_package_name,
+ sub_package_pattern_match=r".*\.triggers\..*",
+ ancestor_match=BaseTrigger,
+ expected_class_name_pattern=TRIGGER_PATTERN,
+ unexpected_class_name_patterns=ALL_PATTERNS - {TRIGGER_PATTERN},
+ ),
}
for entity in EntityType:
print_wrong_naming(entity,
all_verified_entities[entity].wrong_entities)
diff --git a/tests/always/test_providers_manager.py
b/tests/always/test_providers_manager.py
index b18eb22071..a4f8acf0d8 100644
--- a/tests/always/test_providers_manager.py
+++ b/tests/always/test_providers_manager.py
@@ -341,6 +341,11 @@ class TestProviderManager:
auth_backend_module_names =
list(provider_manager.auth_backend_module_names)
assert len(auth_backend_module_names) > 0
+ def test_trigger(self):
+ provider_manager = ProvidersManager()
+ trigger_class_names = list(provider_manager.trigger)
+ assert len(trigger_class_names) > 10
+
@patch("airflow.providers_manager.import_string")
def test_optional_feature_no_warning(self, mock_importlib_import_string):
with self._caplog.at_level(logging.WARNING):