potiuk commented on a change in pull request #12466:
URL: https://github.com/apache/airflow/pull/12466#discussion_r531554294



##########
File path: airflow/providers_manager.py
##########
@@ -36,52 +39,231 @@
 
 
 def _create_validator():
+    """Creates JSON schema validator from the provider.yaml.schema.json"""
     schema = json.loads(importlib_resources.read_text('airflow', 'provider.yaml.schema.json'))
     cls = jsonschema.validators.validator_for(schema)
     validator = cls(schema)
     return validator
 
 
 class ProvidersManager:
-    """Manages all provider packages."""
+    """
+    Manages all provider packages. This is a Singleton class. The first time it is
+    instantiated, it discovers all available providers in installed packages and
+    local source folders (if airflow is run from sources).
+    """
+
+    _instance = None
+    resource_version = "0"
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
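    # A minimal usage sketch of the singleton pattern implemented above (the variable
    # names below are only illustrative): every call to ProvidersManager() returns the
    # same cached instance.
    #
    #     manager_a = ProvidersManager()
    #     manager_b = ProvidersManager()
    #     assert manager_a is manager_b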
 
     def __init__(self):
-        self._provider_directory = {}
-        try:
-            from airflow import providers
-        except ImportError as e:
-            log.warning("No providers are present or error when importing them! :%s", e)
-            return
+        # Keeps dict of providers keyed by package name; the value is a Tuple: version, provider_info
+        self._provider_dict: Dict[str, Tuple[str, Dict]] = {}
+        # Keeps dict of hooks keyed by connection type and value is
+        # Tuple: connection class, connection_id_attribute_name
+        self._hooks_dict: Dict[str, Tuple[str, str]] = {}
         self._validator = _create_validator()
-        self.__find_all_providers(providers.__path__)
+        # Local source folders are loaded first. They should take precedence over the package ones for
+        # development purposes. In production, provider.yaml files are not present in the 'airflow' directory,
+        # so there is no risk we are going to override package providers accidentally. This can only happen
+        # in case of local development.
+        self._discover_all_airflow_builtin_providers_from_local_sources()
+        self._discover_all_providers_from_packages()
+        self._discover_hooks()
+        self._sort_provider_dictionary()
+        self._sort_hooks_dictionary()
 
-    def __find_all_providers(self, paths: str):
-        def onerror(_):
-            exception_string = traceback.format_exc()
-            log.warning(exception_string)
+    def _sort_hooks_dictionary(self):
+        """
+        Creates the hooks dictionary as sorted (by connection_type) OrderedDict.
 
-        for module_info in pkgutil.walk_packages(paths, prefix="airflow.providers.", onerror=onerror):
+        Duplicates are removed from "package" providers in case corresponding "folder" provider is found.
+        The "folder" providers are from local sources (packages do not contain provider.yaml files),
+        so if someone has airflow installed from local sources, the providers are imported from there
+        first, so provider information should be taken from there.
+        """
+        sorted_dict = OrderedDict()
+        for connection_type in sorted(self._hooks_dict.keys()):
+            sorted_dict[connection_type] = self._hooks_dict[connection_type]
+        self._hooks_dict = sorted_dict
+
+    def _sort_provider_dictionary(self):
+        """
+        Sort provider_dictionary using OrderedDict.
+
+        The dictionary gets sorted so that when you iterate through it, the providers are by
+        default returned in alphabetical order.
+        """
+        sorted_dict = OrderedDict()
+        for provider_name in sorted(self._provider_dict.keys()):
+            sorted_dict[provider_name] = self._provider_dict[provider_name]
+        self._provider_dict = sorted_dict
+
+    def _discover_all_providers_from_packages(self) -> None:
+        """
+        Discovers all providers by scanning packages installed. The list of providers should be returned
+        via the 'apache_airflow_provider' entrypoint as a dictionary conforming to the
+        'airflow/provider.yaml.schema.json' schema.
+        """
+        for entry_point in pkg_resources.iter_entry_points('apache_airflow_provider'):
+            package_name = entry_point.dist.project_name
+            log.debug("Loading %s from package %s", entry_point, package_name)
+            version = entry_point.dist.version
             try:
-                imported_module = importlib.import_module(module_info.name)
-            except Exception as e:  # noqa pylint: disable=broad-except
-                log.warning("Error when importing %s:%s", module_info.name, e)
+                provider_info = entry_point.load()()
+            except pkg_resources.VersionConflict as e:
+                log.warning(
+                    "The provider package %s could not be registered because of version conflict: %s",
+                    package_name,
+                    e,
+                )
                 continue
-            try:
-                provider = importlib_resources.read_text(imported_module, 'provider.yaml')
-                provider_info = yaml.safe_load(provider)
-                self._validator.validate(provider_info)
-                self._provider_directory[provider_info['package-name']] = provider_info
-            except FileNotFoundError:
-                # This is OK - this is not a provider package
-                pass
-            except TypeError as e:
-                if "is not a package" not in str(e):
-                    log.warning("Error when loading 'provider.yaml' file from %s:%s}", module_info.name, e)
-                # Otherwise this is OK - this is likely a module
-            except Exception as e:  # noqa pylint: disable=broad-except
-                log.warning("Error when loading 'provider.yaml' file from %s:%s", module_info.name, e)
+            self._validator.validate(provider_info)
+            provider_info_package_name = provider_info['package-name']
+            if package_name != provider_info_package_name:
+                raise Exception(
+                    f"The package '{package_name}' from setuptools and "
+                    f"{provider_info_package_name} do not match. Please make sure they are aligned"
+                )
+            if package_name not in self._provider_dict:
+                self._provider_dict[package_name] = (version, provider_info)
+            else:
+                log.warning(
+                    "The provider for package '%s' could not be registered because providers for that "
+                    "package name have already been registered",
+                    package_name,
+                )
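    # An illustrative sketch of the entry point such a provider distribution would expose
    # (all names below are hypothetical examples, not taken from this change). In the
    # distribution's setup.cfg:
    #
    #     [options.entry_points]
    #     apache_airflow_provider=
    #         provider_info=myorg_airflow_provider:get_provider_info
    #
    # The referenced zero-argument callable returns a dict that must validate against
    # 'airflow/provider.yaml.schema.json' and whose 'package-name' matches the
    # distribution name, e.g.:
    #
    #     def get_provider_info():
    #         return {
    #             "package-name": "myorg-airflow-provider",
    #             "versions": ["1.0.0"],
    #         }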
+
+    def _discover_all_airflow_builtin_providers_from_local_sources(self) -> None:
+        """
+        Finds all built-in airflow providers if airflow is run from the local sources.
+        It finds `provider.yaml` files for all such providers and registers the providers using those.
+
+        This 'provider.yaml' scanning takes precedence over scanning installed packages:
+        in case you have both sources and packages installed, the providers will be loaded from
+        the "airflow" sources rather than from the packages.
+        """
+        try:
+            import airflow.providers
+        except ImportError:
+            log.info("You have no providers installed.")
+            return
+        try:
+            for path in airflow.providers.__path__:
+                self._add_provider_info_from_local_source_files_on_path(path)
+        except Exception as e:  # noqa pylint: disable=broad-except
+            log.warning("Error when loading 'provider.yaml' files from airflow sources: %s", e)
+
+    def _add_provider_info_from_local_source_files_on_path(self, path) -> None:
+        """
+        Finds all the provider.yaml files in the directory specified.
+
+        :param path: path where to look for provider.yaml files
+        """
+        root_path = path
+        for folder, subdirs, files in os.walk(path, topdown=True):
+            for filename in fnmatch.filter(files, "provider.yaml"):
+                package_name = "apache-airflow-providers" + folder[len(root_path) :].replace(os.sep, "-")
+                self._add_provider_info_from_local_source_file(os.path.join(folder, filename), package_name)
+                subdirs[:] = []
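        # A worked example of the package-name derivation above, assuming a POSIX
        # os.sep of "/" and a hypothetical source checkout (paths are illustrative):
        #
        #     root_path = "/home/user/airflow/airflow/providers"
        #     folder    = "/home/user/airflow/airflow/providers/apache/hive"
        #     package_name == "apache-airflow-providers-apache-hive"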
+
+    def _add_provider_info_from_local_source_file(self, path, package_name) -> None:
+        """
+        Parses found provider.yaml file and adds found provider to the dictionary.
+
+        :param path: full file path of the provider.yaml file
+        :param package_name: name of the package
+        """
+        try:
+            log.debug("Loading %s from %s", package_name, path)
+            with open(path) as provider_yaml_file:
+                provider_info = yaml.safe_load(provider_yaml_file)
+            self._validator.validate(provider_info)
+
+            version = provider_info['versions'][0]
+            if package_name not in self._provider_dict:
+                self._provider_dict[package_name] = (version, provider_info)
+            else:
+                log.warning(
+                    "The providers for package '%s' could not be registered because providers for that "
+                    "package name have already been registered",
+                    package_name,
+                )
+        except Exception as e:  # noqa pylint: disable=broad-except
+            log.warning("Error when loading '%s': %s", path, e)
+
+    def _discover_hooks(self) -> None:
+        """Retrieves all connections defined in the providers"""
+        for name, provider in self._provider_dict.items():
+            provider_package = name
+            hook_class_names = provider[1].get("hook-class-names")
+            if hook_class_names:
+                for hook_class_name in hook_class_names:
+                    self._add_hook(hook_class_name, provider_package)
+
+    def _add_hook(self, hook_class_name, provider_package) -> None:
+        """
+        Adds hook class name to list of hooks
+
+        :param hook_class_name: name of the Hook class
+        :param provider_package: provider package adding the hook
+        """
+        if provider_package.startswith("apache-airflow"):
+            provider_path = provider_package[len("apache-") :].replace("-", ".")
+            if not hook_class_name.startswith(provider_path):
+                log.warning(
+                    "Sanity check failed when importing '%s' from '%s' package. It should start with '%s'",
+                    hook_class_name,
+                    provider_package,
+                    provider_path,
+                )
+                return
+        if hook_class_name in self._hooks_dict:
+            log.warning(
+                "The hook_class '%s' has been already registered.",
+                hook_class_name,
+            )
+            return
+        try:
+            module, class_name = hook_class_name.rsplit('.', maxsplit=1)
+            hook_class = getattr(importlib.import_module(module), class_name)
+        except Exception as e:  # noqa pylint: disable=broad-except
+            log.warning(
+                "Exception when importing '%s' from '%s' package: %s",
+                hook_class_name,
+                provider_package,
+                e,
+            )
+            return
+        conn_type = getattr(hook_class, 'conn_type', None)
+        if not conn_type:
+            log.warning(
+                "The hook_class '%s' misses conn_type attribute and cannot be registered",
+                hook_class,
+            )
+            return
+        connection_id_attribute_name = getattr(hook_class, 'conn_name_attr', None)
+        if not connection_id_attribute_name:
+            log.warning(
+                "The hook_class '%s' misses conn_name_attr attribute and cannot be registered",
+                hook_class,
+            )
+            return
+
+        self._hooks_dict[conn_type] = (hook_class_name, connection_id_attribute_name)
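        # An illustrative sketch of the resulting registry entry, assuming a hypothetical
        # hook class "myorg.providers.foo.hooks.foo.FooHook" with conn_type "foo" and
        # conn_name_attr "foo_conn_id":
        #
        #     self._hooks_dict["foo"] == ("myorg.providers.foo.hooks.foo.FooHook", "foo_conn_id")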

Review comment:
       Happy to change it here anyway.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]

